`;
17 | } catch (error) {
18 | return `${error.toString()}`
19 | }
20 |
21 | });
22 |
23 | return inline.replace(/\$\$([\s\S]*?)\$\$/g, function(m, code) {
24 |
25 | try {
26 | return `${katex.renderToString(code.trim())}`;
27 | } catch (error) {
28 | return `${error.toString()}`
29 | }
30 |
31 | });
32 |
33 | })
34 |
35 | hook.afterEach(function(html, next) {
36 | let parsed = html.replace(/([\s\S]*?)<\/katex>/g, function(m, code) {
37 | return code;
38 | });
39 | next(parsed);
40 | })
41 |
42 | };
43 | }
44 |
45 | })(window.$docsify,window.katex);
--------------------------------------------------------------------------------
/projects/first-neural-network/Bike-Sharing-Dataset/Readme.txt:
--------------------------------------------------------------------------------
1 | ==========================================
2 | Bike Sharing Dataset
3 | ==========================================
4 |
5 | Hadi Fanaee-T
6 |
7 | Laboratory of Artificial Intelligence and Decision Support (LIAAD), University of Porto
8 | INESC Porto, Campus da FEUP
9 | Rua Dr. Roberto Frias, 378
10 | 4200 - 465 Porto, Portugal
11 |
12 |
13 | =========================================
14 | Background
15 | =========================================
16 |
17 | Bike sharing systems are a new generation of traditional bike rentals where the whole process, from membership to rental and
18 | return, has become automatic. Through these systems, a user is able to easily rent a bike from a particular position and return
19 | it at another position. Currently, there are over 500 bike-sharing programs around the world, composed of
20 | over 500 thousand bicycles. Today, there is great interest in these systems due to their important role in traffic,
21 | environmental and health issues.
22 |
23 | Apart from interesting real world applications of bike sharing systems, the characteristics of the data being generated by
24 | these systems make them attractive for research. As opposed to other transport services such as bus or subway, the duration
25 | of travel and the departure and arrival positions are explicitly recorded in these systems. This feature turns a bike sharing system into
26 | a virtual sensor network that can be used for sensing mobility in the city. Hence, it is expected that most of the important
27 | events in the city could be detected by monitoring these data.
28 |
29 | =========================================
30 | Data Set
31 | =========================================
32 | The bike-sharing rental process is highly correlated to the environmental and seasonal settings. For instance, weather conditions,
33 | precipitation, day of week, season, hour of the day, etc. can affect the rental behaviors. The core data set is the
34 | two-year historical log corresponding to years 2011 and 2012 from the Capital Bikeshare system, Washington D.C., USA, which is
35 | publicly available at http://capitalbikeshare.com/system-data. We aggregated the data on an hourly and a daily basis and then
36 | extracted and added the corresponding weather and seasonal information. Weather information is extracted from http://www.freemeteo.com.
37 |
38 | =========================================
39 | Associated tasks
40 | =========================================
41 |
42 | - Regression:
43 | Prediction of bike rental count, hourly or daily, based on the environmental and seasonal settings.
44 |
45 | - Event and Anomaly Detection:
46 | The count of rented bikes is also correlated to some events in the town which are easily traceable via search engines.
47 | For instance, a query like "2012-10-30 washington d.c." in Google returns results related to Hurricane Sandy. Some of the important events are
48 | identified in [1]. Therefore the data can also be used for the validation of anomaly or event detection algorithms.
49 |
50 |
51 | =========================================
52 | Files
53 | =========================================
54 |
55 | - Readme.txt
56 | - hour.csv : bike sharing counts aggregated on an hourly basis. Records: 17379 hours
57 | - day.csv : bike sharing counts aggregated on a daily basis. Records: 731 days
58 |
59 |
60 | =========================================
61 | Dataset characteristics
62 | =========================================
63 | Both hour.csv and day.csv have the following fields, except hr which is not available in day.csv
64 |
65 | - instant: record index
66 | - dteday : date
67 | - season : season (1:spring, 2:summer, 3:fall, 4:winter)
68 | - yr : year (0: 2011, 1:2012)
69 | - mnth : month ( 1 to 12)
70 | - hr : hour (0 to 23)
71 | - holiday : whether the day is a holiday or not (extracted from http://dchr.dc.gov/page/holiday-schedule)
72 | - weekday : day of the week
73 | - workingday : 1 if the day is neither a weekend nor a holiday, otherwise 0.
74 | + weathersit :
75 |         - 1: Clear, Few clouds, Partly cloudy, Partly cloudy
76 |         - 2: Mist + Cloudy, Mist + Broken clouds, Mist + Few clouds, Mist
77 |         - 3: Light Snow, Light Rain + Thunderstorm + Scattered clouds, Light Rain + Scattered clouds
78 |         - 4: Heavy Rain + Ice Pellets + Thunderstorm + Mist, Snow + Fog
79 | - temp : Normalized temperature in Celsius. The values are divided by 41 (max)
80 | - atemp: Normalized feeling temperature in Celsius. The values are divided by 50 (max)
81 | - hum: Normalized humidity. The values are divided by 100 (max)
82 | - windspeed: Normalized wind speed. The values are divided by 67 (max)
83 | - casual: count of casual users
84 | - registered: count of registered users
85 | - cnt: count of total rental bikes including both casual and registered
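
As a rough illustration (not part of the original dataset distribution), the normalized fields can be converted back to their original scales in Python, assuming pandas is available:

import pandas as pd

day = pd.read_csv("day.csv")
day["temp_denorm"] = day["temp"] * 41            # temp was divided by 41 (max)
day["atemp_denorm"] = day["atemp"] * 50          # atemp was divided by 50 (max)
day["hum_denorm"] = day["hum"] * 100             # hum was divided by 100 (max)
day["windspeed_denorm"] = day["windspeed"] * 67  # windspeed was divided by 67 (max)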
86 |
87 | =========================================
88 | License
89 | =========================================
90 | Use of this dataset in publications must cite the following publication:
91 |
92 | [1] Fanaee-T, Hadi, and Gama, Joao, "Event labeling combining ensemble detectors and background knowledge", Progress in Artificial Intelligence (2013): pp. 1-15, Springer Berlin Heidelberg, doi:10.1007/s13748-013-0040-3.
93 |
94 | @article{
95 | year={2013},
96 | issn={2192-6352},
97 | journal={Progress in Artificial Intelligence},
98 | doi={10.1007/s13748-013-0040-3},
99 | title={Event labeling combining ensemble detectors and background knowledge},
100 | url={http://dx.doi.org/10.1007/s13748-013-0040-3},
101 | publisher={Springer Berlin Heidelberg},
102 | keywords={Event labeling; Event detection; Ensemble learning; Background knowledge},
103 | author={Fanaee-T, Hadi and Gama, Joao},
104 | pages={1-15}
105 | }
106 |
107 | =========================================
108 | Contact
109 | =========================================
110 |
111 | For further information about this dataset please contact Hadi Fanaee-T (hadi.fanaee@fe.up.pt)
112 |
--------------------------------------------------------------------------------
/projects/first-neural-network/my_answers.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | class NeuralNetwork(object):
5 |     def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
6 |         # Set number of nodes in input, hidden and output layers.
7 |         self.input_nodes = input_nodes
8 |         self.hidden_nodes = hidden_nodes
9 |         self.output_nodes = output_nodes
10 |
11 |         # Initialize weights
12 |         self.weights_input_to_hidden = np.random.normal(0.0, self.input_nodes**-0.5,
13 |                                                          (self.input_nodes, self.hidden_nodes))
14 |
15 |         self.weights_hidden_to_output = np.random.normal(0.0, self.hidden_nodes**-0.5,
16 |                                                           (self.hidden_nodes, self.output_nodes))
17 |         self.lr = learning_rate
18 |
19 |         #### Set self.activation_function to the sigmoid function ####
20 |         #
21 |         # Note: in Python, you can define a function with a lambda expression,
22 |         # as shown below.
23 |         self.activation_function = lambda x: 1 / (1 + np.exp(-x))  # sigmoid
24 |
25 |         ### If the lambda code above is not something you're familiar with,
26 |         # you can uncomment the following three lines and put your
27 |         # implementation there instead.
28 |         #
29 |         #def sigmoid(x):
30 |         #    return 0  # Replace 0 with your sigmoid calculation here
31 |         #self.activation_function = sigmoid
32 |
33 |
34 |     def train(self, features, targets):
35 |         ''' Train the network on a batch of features and targets.
36 |
37 |             Arguments
38 |             ---------
39 |
40 |             features: 2D array, each row is one data record, each column is a feature
41 |             targets: 1D array of target values
42 |
43 |         '''
44 |         n_records = features.shape[0]
45 |         delta_weights_i_h = np.zeros(self.weights_input_to_hidden.shape)
46 |         delta_weights_h_o = np.zeros(self.weights_hidden_to_output.shape)
47 |         for X, y in zip(features, targets):
48 |
49 |             final_outputs, hidden_outputs = self.forward_pass_train(X)  # Implement the forward pass function below
50 |             # Implement the backpropagation function below
51 |             delta_weights_i_h, delta_weights_h_o = self.backpropagation(final_outputs, hidden_outputs, X, y,
52 |                                                                          delta_weights_i_h, delta_weights_h_o)
53 |         self.update_weights(delta_weights_i_h, delta_weights_h_o, n_records)
54 |
55 |
56 |     def forward_pass_train(self, X):
57 |         ''' Implement forward pass here
58 |
59 |             Arguments
60 |             ---------
61 |             X: features batch
62 |
63 |         '''
64 |         #### Implement the forward pass here ####
65 |         ### Forward pass ###
66 |         # Hidden layer
67 |         hidden_inputs = np.dot(X, self.weights_input_to_hidden)  # signals into hidden layer
68 |         hidden_outputs = self.activation_function(hidden_inputs)  # signals from hidden layer
69 |
70 |         # Output layer - the identity is used as the activation since this is a regression problem
71 |         final_inputs = np.dot(hidden_outputs, self.weights_hidden_to_output)  # signals into final output layer
72 |         final_outputs = final_inputs  # signals from final output layer
73 |
74 |         return final_outputs, hidden_outputs
75 |
76 |     def backpropagation(self, final_outputs, hidden_outputs, X, y, delta_weights_i_h, delta_weights_h_o):
77 |         ''' Implement backpropagation
78 |
79 |             Arguments
80 |             ---------
81 |             final_outputs: output from forward pass
82 |             y: target (i.e. label) batch
83 |             delta_weights_i_h: change in weights from input to hidden layers
84 |             delta_weights_h_o: change in weights from hidden to output layers
85 |
86 |         '''
87 |         #### Implement the backward pass here ####
88 |         ### Backward pass ###
89 |
90 |         # Output error
91 |         error = y - final_outputs  # Output layer error is the difference between desired target and actual output.
92 |
93 |         # Calculate the hidden layer's contribution to the error
94 |         hidden_error = np.dot(self.weights_hidden_to_output, error)
95 |
96 |         # Backpropagated error terms
97 |         output_error_term = error * 1.0  # the output activation is the identity, so its derivative is 1
98 |
99 |         hidden_error_term = hidden_error * hidden_outputs * (1 - hidden_outputs)
100 |
101 |         # Weight step (input to hidden)
102 |         delta_weights_i_h += hidden_error_term * X[:, None]
103 |         # Weight step (hidden to output)
104 |         delta_weights_h_o += output_error_term * hidden_outputs[:, None]
105 |         return delta_weights_i_h, delta_weights_h_o
106 |
107 |     def update_weights(self, delta_weights_i_h, delta_weights_h_o, n_records):
108 |         ''' Update weights on gradient descent step
109 |
110 |             Arguments
111 |             ---------
112 |             delta_weights_i_h: change in weights from input to hidden layers
113 |             delta_weights_h_o: change in weights from hidden to output layers
114 |             n_records: number of records
115 |
116 |         '''
117 |         self.weights_hidden_to_output += self.lr * delta_weights_h_o / n_records  # update hidden-to-output weights with gradient descent step
118 |         self.weights_input_to_hidden += self.lr * delta_weights_i_h / n_records  # update input-to-hidden weights with gradient descent step
119 |
120 |     def run(self, features):
121 |         ''' Run a forward pass through the network with input features
122 |
123 |             Arguments
124 |             ---------
125 |             features: 1D array of feature values
126 |         '''
127 |
128 |         #### Implement the forward pass here ####
129 |         # Hidden layer
130 |         hidden_inputs = np.dot(features, self.weights_input_to_hidden)  # signals into hidden layer
131 |         hidden_outputs = self.activation_function(hidden_inputs)  # signals from hidden layer
132 |
133 |         # Output layer
134 |         final_inputs = np.dot(hidden_outputs, self.weights_hidden_to_output)  # signals into final output layer
135 |         final_outputs = final_inputs  # signals from final output layer
136 |
137 |         return final_outputs
138 |
139 |
140 | #########################################################
141 | # Set your hyperparameters here
142 | ##########################################################
143 | iterations = 2500
144 | learning_rate = 0.7
145 | hidden_nodes = 15
146 | output_nodes = 1
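
# --- Usage sketch (not part of the graded submission): a minimal smoke test on
# --- random data, run only when this file is executed directly.
if __name__ == "__main__":
    np.random.seed(42)
    network = NeuralNetwork(3, hidden_nodes, output_nodes, learning_rate)
    fake_features = np.random.rand(10, 3)   # 10 records with 3 input features each
    fake_targets = np.random.rand(10)
    for _ in range(100):                    # a few training passes are enough for a smoke test
        network.train(fake_features, fake_targets)
    print(network.run(fake_features[:2]))   # predictions for the first two records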
147 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | **Note:** These are notes I took while doing the [Udacity Deep Learning Nanodegree](https://eu.udacity.com/course/deep-learning-nanodegree--nd101) program. All rights to the images found in these notes and in the Jupyter notebooks go to [Udacity](https://udacity.com) unless explicitly noted. You can read them online [here](https://ibesora.github.io/udacity-deeplearning-notes/).
4 |
5 | # Notes
6 | ## Python and NumPy refresher
7 | [NumPy](https://docs.scipy.org/doc/numpy/reference/) is a numerical Python library, largely written in C, that performs much better than base Python.
8 |
9 | [ndarray](https://docs.scipy.org/doc/numpy/reference/arrays.html) objects are used to represent any kind of number. They are like lists but they can have any number of dimensions. We'll use them to represent scalars, vectors, matrices or tensors.
10 |
11 | NumPy lets you specify number types and sizes so instead of using the basic Python types: `int`, `float`, etc. we'll use `uint8`, `int8`, `int16`, ...
12 |
13 | To create a scalar we'll create a NumPy ndarray with only one element as `scalar = np.array(3)`
14 |
15 | To see the shape of an ndarray we'll use `scalar.shape`. In the case of a scalar value it will print `()` as it has 0 dimensions
16 |
17 | To create a vector we'll pass a Python list to the array function `vector = np.array([1, 2, 3])`. Using `vector.shape` would return `(3,)`. We can use advanced indexing such as `vector[1:]` as well. That would return a new vector with the elements from 1 onward. You can read the documentation on NumPy slicing [here](https://docs.scipy.org/doc/numpy/reference/arrays.indexing.html)
18 |
19 | To create matrices we'll pass a list of lists where each list is a matrix row: `matrix = np.array([[1, 0, 0], [0, 1, 0]])`. `matrix.shape` would then return `(2, 3)` showing that it has two rows with three columns each. To create tensors we'll pass a list of lists of lists, and so on.
20 |
21 | NumPy allows changing the shape of an array without changing the underlying data. For example, we can use `vector.reshape(1, 3)` to convert the vector to a 1x3 matrix. You can find the reshape documentation [here](https://docs.scipy.org/doc/numpy/reference/generated/numpy.reshape.html). We can also use slicing to reshape the vector. For example `vector[:, None]` would return a 3x1 matrix and `vector[None, :]` a 1x3 one
22 |
23 | NumPy also lets us perform element-wise operations. Instead of looping through the array and performing an operation on each element we can use something like `ndarray + 5`. Notice that when the elements on both sides of an operator are matrices, NumPy also performs element-wise operations. If we want to perform mathematically correct matrix multiplication we can use the [matmul](https://docs.scipy.org/doc/numpy/reference/generated/numpy.matmul.html#numpy.matmul) function. To recap: given two matrices stored in `ndarray`s `m` and `n`, `m*n` would perform element-wise multiplication and `np.matmul(m, n)` would perform mathematically correct matrix multiplication.
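
A small sketch of these operations (the array values here are just for illustration):

```python
import numpy as np

vector = np.array([1, 2, 3])            # shape (3,)
matrix = np.array([[1, 0, 0],
                   [0, 1, 0]])          # shape (2, 3)

print(vector + 5)                       # element-wise: [6 7 8]
print(vector[:, None].shape)            # (3, 1), the vector reshaped to a 3x1 matrix
print(matrix * matrix)                  # element-wise multiplication, still (2, 3)
print(np.matmul(matrix, vector))        # matrix multiplication, result has shape (2,)
```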
24 |
25 | ## Introduction to Neural Networks
26 |
27 | ### Perceptrons
28 | Perceptrons are the building blocks of Neural Networks. If we compare a Neural Network with the brain, perceptrons would be the neurons. They take a set of inputs and produce an output in the same way a neuron does.
29 |
30 | What they do is the following: given a set of inputs and weights (the contribution of each input to the final result) they return an answer to the question we are asking. The simplest question we can ask is whether an element belongs to a class or not, i.e. a binary classification.
31 |
32 | Take, for example, university acceptance, where acceptance comes from a relation between the course grades and the entrance test grade. We can classify all the students into two classes (accepted (blue) or not (red)) and plot their grades in a 2D graph.
33 | 
34 |
35 | The perceptron that answers this question would be like the following:
36 | 
37 | Where the **Step function** is what's called the **activation function**: The function that translates the output of the perceptron to the answer of our question.
38 |
39 | The neat thing about neural networks is that instead of computing the weights ourselves, we give them the desired outputs and they compute the weights themselves.
40 |
41 | #### Perceptron algorithm
42 | We can compute the weights of a perceptron the following way:
43 | * Start with random weights: $$w_1, ..., w_n, b$$
44 | * For every misclassified point $$(x_1, ..., x_n)$$
45 |   * If $$prediction == 0$$
46 |     * For $$i=1..n$$
47 |       * $$w_i = w_i + \alpha x_i$$
48 |     * $$b = b + \alpha$$
49 |   * If $$prediction == 1$$
50 |     * For $$i=1..n$$
51 |       * $$w_i = w_i - \alpha x_i$$
52 |     * $$b = b - \alpha$$
53 |
54 |
55 |
56 | Where $$\alpha$$ is the **learning rate**. We can repeat the loop over the misclassified points until the error is as small as we want or for a fixed number of steps.
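
A minimal NumPy sketch of one pass of this algorithm (the function and variable names are mine, not from the course code):

```python
import numpy as np

def perceptron_step(X, y, w, b, alpha=0.01):
    """One pass of the perceptron update rule over all points."""
    for x_i, y_i in zip(X, y):
        prediction = 1 if np.dot(x_i, w) + b >= 0 else 0
        if y_i == 1 and prediction == 0:    # misclassified positive point
            w = w + alpha * x_i
            b = b + alpha
        elif y_i == 0 and prediction == 1:  # misclassified negative point
            w = w - alpha * x_i
            b = b - alpha
    return w, b
```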
57 |
58 | To minimize the error we'll use a technique called [gradient descent](#gradient-descent), but to do so we need continuous prediction values and errors. Instead of answering _Is this point correctly classified?_ with a _Yes_ or _No_, we want the answer to be _53.8% likely_. We do that by replacing the **step function** with the **sigmoid function** as the **activation function**. The sigmoid function is defined as follows:
59 |
60 | $$$\sigma(x) = \frac{1}{(1 + e^{-x})}$$$
61 |
62 | Then we can use the following formula as the **error function** introduced by each point:
63 |
64 | $$$E = y - \hat{y}$$$
65 |
66 | where $$y$$ is the actual label and $$\hat{y}$$ is the label predicted by our model
67 |
68 | ### Softmax
69 | When, instead of a binary classification problem, we have multiple classes, we can compute the probability of being each class by using the **softmax** function. Let's say we have $$N$$ classes and a linear model that gives us the scores $$Z_1, ..., Z_N$$. The probability of being of class $$i$$ is:
70 |
71 | $$$P(i) = \frac{e^{Z_i}}{e^{Z_1} + ... + e^{Z_N}}$$$
72 |
73 | 
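A short sketch of the softmax function in NumPy (the max-subtraction is a standard numerical-stability trick, not part of the formula above):

```python
import numpy as np

def softmax(z):
    """Turn a vector of scores z into probabilities that sum to 1."""
    exp_z = np.exp(z - np.max(z))   # subtracting the max avoids overflow
    return exp_z / exp_z.sum()

print(softmax(np.array([2.0, 1.0, 0.1])))  # roughly [0.66 0.24 0.10]
```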
74 |
75 | ### One-Hot encoding
76 | We have always worked with numerical properties but sometimes the data has non-numerical properties. To use those in our model we have to convert them to numerical properties, and we do that using a technique called **one-hot encoding**. What it does is create a column for each possible value of the property, setting a $$1$$ in the column corresponding to the value each row has and a $$0$$ in the others. Defining it that way ensures that only one of the value columns per property is $$1$$
77 | 
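As a quick illustration with NumPy (pandas' `pd.get_dummies` does the same thing on DataFrame columns):

```python
import numpy as np

values = np.array([0, 2, 1, 2])    # a property with 3 possible values, one entry per row
one_hot = np.eye(3)[values]        # one column per possible value, a single 1 per row
print(one_hot)
# [[1. 0. 0.]
#  [0. 0. 1.]
#  [0. 1. 0.]
#  [0. 0. 1.]]
```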
78 |
79 | ### Maximum Likelihood
80 | We can use probability to evaluate how good our model is. Once we have a model, we can compute the probability of each point having the value we gave it and use that to compute a global value for our model. That's what's called **maximum likelihood**
81 |
82 | The model output, $$\hat y = \sigma(Wx+b)$$, is exactly the probability of a point being positive (blue in our example), so for the blue points we use this probability directly and for the red points we use the complement $$P(red) = 1 - P(blue)$$.
83 |
84 | 
85 |
86 | Multiplying the probabilities of all the points being what they are, we get a probability value for the model, where bigger is better.
87 |
88 | $$$Probability = Pb(blue0)*Pb(blue1)*Pr(red1)*Pr(red2)$$$
89 |
90 | Where $$Pb$$ is the probability of being blue and $$Pr$$ is the probability of being red.
91 |
92 | 
93 |
94 | Maximizing the probability of a model decreases the error, so how can we maximize the probability of a model?
95 |
96 | First we use logarithms to turn our probability function from a product into a sum, noticing that $$\ln(ab) = \ln(a) + \ln(b)$$. As the logarithm of a number between $$0$$ and $$1$$ is always negative, we take the negative of the sum, and we get what's called **cross-entropy**. If the **cross-entropy** is big, the model is bad.
97 |
98 | Given that a negative logarithm of a number close to 1 is almost 0, and that the negative logarithm of a number close to 0 is big, we can think of these values as errors at each point. That changes our goal from maximizing probability to minimizing the cross entropy.
99 |
100 | Mathematically, the **cross-entropy** of two vectors $$y$$ and $$p$$ is:
101 |
102 | $$$Cross-entropy=-\displaystyle\sum_{i=1}^m [y_i\ln(p_i) + (1-y_i)\ln(1-p_i)]$$$
103 |
104 | 
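A small sketch of this formula in NumPy (names are mine; `y` holds the labels and `p` the predicted probabilities):

```python
import numpy as np

def cross_entropy(y, p):
    """Cross-entropy between binary labels y and predicted probabilities p."""
    y, p = np.asarray(y, dtype=float), np.asarray(p, dtype=float)
    return -np.sum(y * np.log(p) + (1 - y) * np.log(1 - p))

print(cross_entropy([1, 0, 1], [0.8, 0.1, 0.7]))  # about 0.69: good model, low error
print(cross_entropy([1, 0, 1], [0.2, 0.9, 0.3]))  # about 5.12: bad model, high error
```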
105 |
106 | ### Logistic regression
107 | The **logistic regression** algorithm is one of the most popular and useful algorithms in Machine Learning, and the building block of all that constitutes Deep Learning. It basically goes like this:
108 |
109 | * Take your data
110 | * Pick a random model
111 | * Calculate the error
112 | * Minimize the error, and obtain a better model
113 | * Enjoy!
114 |
115 | To calculate the error of a model we use the **cross-entropy** where the $$y$$ vector is a vector that classifies each point being $$1$$ if the point is blue and $$0$$ if the point is red. That's what we do with one-hot encoding. Then the error function is as follows:
116 |
117 | $$$Error = -\frac{1}{m}\displaystyle\sum_{i=1}^m [y_i\ln(\hat{y}_i) + (1-y_i)\ln(1-\hat{y}_i)]$$$
118 |
119 | Two things to notice with this formula:
120 | * When a point is blue, $$y=1$$ and $$(1-y)=0$$ so, in our case, only one of the two logarithms is computed, giving us the same formula we had before
121 | * We do the average as a convention
122 |
123 | Given that we can express our error function as a function of weights and biases, our model error function is:
124 |
125 | $$$E(W,b)=-\frac{1}{m}\displaystyle\sum_{i=1}^m [(1-y_i)\ln(1-\sigma(Wx^i+b))+ y_i\ln(\sigma(Wx^i+b))]$$$
126 |
127 | where $$y_i$$ is the label of the point $$x^i$$
128 |
129 | ### Gradient descent
130 | In order to minimize the error function we must first find the direction of steepest descent at each step: we take the negative of the gradient of the error function. That ensures the error at each step is lower than the error at the previous one. If we repeat this procedure we'll arrive at a minimum of the error function, but that isn't always the absolute minimum. In order not to get stuck in a local minimum, a number of different techniques can be used. The mathematical definition of the gradient is the following:
131 |
132 | $$$\bigtriangledown E=(\frac{\delta E}{\delta W_1}, ..., \frac{\delta E}{\delta W_N}, \frac{\delta E}{\delta b})$$$
133 |
134 | When using the **sigmoid** function as the **activation function** the derivative is the following one:
135 |
136 | $$$\sigma'(x) = \frac{\delta}{\delta x}\frac{1}{(1 + e^{-x})}$$$
137 | $$$\sigma'(x) = \frac{e^{-x}}{(1+e^{-x})^2}$$$
138 | $$$\sigma'(x) = \frac{1}{1+e^{-x}}\cdot\frac{e^{-x}}{1+e^{-x}}$$$
139 | $$$\sigma'(x) = \sigma(x)(1 - \sigma(x))$$$
140 |
141 | For a point with coordinates $$(x_1, ..., x_n)$$, label $$y$$ and prediction $$\hat{y}$$, the gradient of the error function at that point is:
142 |
143 | $$$\bigtriangledown E = -(y - \hat{y})(x_1, ..., x_n, 1)$$$
144 |
145 | Therefore, at each step we must update the weights in the following way:
146 |
147 | $$$ w_i' = w_i - \alpha[-(y - \hat{y})x_i] $$$
148 | $$$ w_i' = w_i + \alpha(y - \hat{y})x_i $$$
149 | $$$ b' = b + \alpha(y - \hat{y}) $$$
150 |
151 | Note that since we've taken the average of errors, the term we are adding should be $$\frac{1}{m}\cdot\alpha$$ instead of $$\alpha$$.
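
These update equations translate almost directly into NumPy. A sketch for a single point (names are mine):

```python
import numpy as np

def sigmoid(x):
    return 1 / (1 + np.exp(-x))

def gradient_descent_step(x, y, weights, bias, learnrate=0.01):
    """Update the weights and bias for one point using the equations above."""
    y_hat = sigmoid(np.dot(x, weights) + bias)   # prediction
    error = y - y_hat
    weights = weights + learnrate * error * x    # w_i' = w_i + alpha * (y - y_hat) * x_i
    bias = bias + learnrate * error              # b' = b + alpha * (y - y_hat)
    return weights, bias
```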
152 |
153 | ### Non-linear models
154 | What happens if the classification boundary can't be represented with just a line and we need more complex shapes? The answer is to use multi-layer perceptrons or, equivalently, a Neural Network.
155 |
156 | The trick is to use two or more linear models and combine them into a nonlinear model. Formally, we calculate the probability in each model, add them via a weighted sum and use the sigmoid function as the activation function to have a value between 0 and 1. We can express that as a linear combination of the two models:
157 | 
158 |
159 | Or using simplified notation:
160 | 
161 | Where the first layer is called the **input layer**, the final one is called the **output layer** and the ones in-between are called the **hidden layers**. **Deep Neural Networks** are Neural Networks with many hidden layers.
162 |
163 | Notice that we are not limited to having only one node in the output layer. In fact, multi-class classification requires an output node per class, each of them giving the probability of the element being in that class.
164 | 
165 |
166 | ### Feedforward
167 | **Feedforward** is the process used by **Neural networks** to generate an output from a set of inputs.
168 | Given the column vector of inputs and bias $$v=\begin{pmatrix} x_1 \\ ... \\ x_n \\ 1\end{pmatrix}$$ and a set of weights $$W^k_{ij}$$, where $$k$$ is the layer index, $$i$$ is the input node index and $$j$$ is the destination node index, the prediction of a neural network with two inputs and two layers that uses the **sigmoid** as the **activation function**, as shown in the following image,
169 | 
170 |
171 | can be written as the following equation:
172 |
173 | $$$\hat{y} = \sigma\begin{pmatrix}W^2_{11} \\ W^2_{21} \\ W^2_{31}\end{pmatrix}\sigma\begin{pmatrix}W^1_{11} && W^1_{12}\\W^1_{21} && W^1_{22}\\W^1_{31} && W^1_{32}\end{pmatrix}\begin{pmatrix}x_1 \\ x_2 \\ 1\end{pmatrix}$$$
174 |
175 | #### Implementation
176 | This sample code implements a forward pass through a 4x3x2 network, with **sigmoid** activation functions for both the hidden and the output layer:
177 |
178 | ```python
179 | import numpy as np
180 |
181 | def sigmoid(x):
182 |     return 1/(1+np.exp(-x))
183 |
184 | # Network size
185 | N_input = 4
186 | N_hidden = 3
187 | N_output = 2
188 |
189 | np.random.seed(42)
190 | # Make some fake data
191 | X = np.random.randn(4)
192 |
193 | weights_input_to_hidden = np.random.normal(0, scale=0.1, size=(N_input, N_hidden))
194 | weights_hidden_to_output = np.random.normal(0, scale=0.1, size=(N_hidden, N_output))
195 |
196 |
197 | # Make a forward pass through the network
198 | hidden_layer_in = np.dot(X, weights_input_to_hidden)
199 | hidden_layer_out = sigmoid(hidden_layer_in)
200 |
201 | output_layer_in = np.dot(hidden_layer_out, weights_hidden_to_output)
202 | output_layer_out = sigmoid(output_layer_in)
203 | ```
204 |
205 | ### Backpropagation
206 | **Backpropagation** is the method used to train the network. What it does, in short, is update the weights whenever the error is bigger than a fixed value. In a nutshell, after a feedforward operation:
207 | 1. Compare the output of the model with the desired output and calculate the error
208 | 2. Run the feedforward operation backwards to spread the error to each weight
209 | 3. Continue this until we have a model that's good
210 |
211 | Mathematically the weight update is performed as
212 |
213 | $$$ W^{\prime k}_{ij} \gets W^k_{ij} - \alpha\frac{\delta E}{\delta W^k_{ij}} $$$
214 |
215 | Note that in order to implement a Neural Network the **error function** used is usually not the one defined before, since $$y - \hat{y}$$ can be negative.
216 | Instead, we'll use the **sum of squared errors** that's defined as $$ E = \frac{1}{2} \sum_{\mu} (y^{\mu} - \hat{y}^{\mu})^2 $$ where $$ \mu $$ is the index of each point. Then, the derivative used to update the weights at each backpropagation step is computed as follows:
217 |
218 | $$$ \frac{\delta E}{\delta w_i} = \frac{\delta}{\delta w_i}\frac{1}{2}(y - \hat{y})^2$$$
219 | $$$ \frac{\delta E}{\delta w_i} = \frac{\delta}{\delta w_i}\frac{1}{2}(y - \hat{y(w_i)})^2$$$
220 | $$$ \frac{\delta E}{\delta w_i} = (y - \hat{y})\frac{\delta}{\delta w_i}(y - \hat{y})$$$
221 | $$$ \frac{\delta E}{\delta w_i} = -(y - \hat{y})f^{\prime}(h)x_i$$$
222 |
223 | #### Implementation
224 | The following sample code calculates the backpropagation step for two sets of weights
225 |
226 | ```python
227 | import numpy as np
228 |
229 | def sigmoid(x):
230 |     return 1 / (1 + np.exp(-x))
231 |
232 | x = np.array([0.5, 0.1, -0.2])
233 | target = 0.6
234 | learnrate = 0.5
235 |
236 | weights_input_hidden = np.array([[0.5, -0.6],
237 |                                  [0.1, -0.2],
238 |                                  [0.1, 0.7]])
239 |
240 | weights_hidden_output = np.array([0.1, -0.3])
241 |
242 | ## Forward pass
243 | hidden_layer_input = np.dot(x, weights_input_hidden)
244 | hidden_layer_output = sigmoid(hidden_layer_input)
245 |
246 | output_layer_in = np.dot(hidden_layer_output, weights_hidden_output)
247 | output = sigmoid(output_layer_in)
248 |
249 | ## Backwards pass
250 | ## Calculate output error
251 | error = target - output
252 |
253 | # Calculate error term for output layer
254 | output_error_term = error * output * (1 - output)
255 |
256 | # Calculate error term for hidden layer
257 | hidden_error_term = np.dot(output_error_term, weights_hidden_output) * \
258 |                     hidden_layer_output * (1 - hidden_layer_output)
259 |
260 | # Calculate change in weights for hidden layer to output layer
261 | delta_w_h_o = learnrate * output_error_term * hidden_layer_output
262 |
263 | # Calculate change in weights for input layer to hidden layer
264 | delta_w_i_h = learnrate * hidden_error_term * x[:, None]
265 | ```
266 |
267 | ### Putting everything together
268 | Here's the general algorithm for updating the weights with gradient descent:
269 |
270 | * Set the weight step to zero: $$\Delta w_i = 0$$
271 | * For each record in the training data:
272 |   * Make a forward pass through the network, calculating the output $$\hat y = f(\sum_i w_i x_i)$$
273 |   * Calculate the error term for the output unit, $$\delta = (y - \hat y) * f'(\sum_i w_i x_i)$$
274 |   * Update the weight $$\Delta w_i = \Delta w_i + \delta x_i$$
275 | * Update the weights $$w_i = w_i + \eta \Delta w_i / m$$ where $$\eta$$ is the learning rate and $$m$$ is the number of records. Here we're averaging the weight steps to help reduce any large variations in the training data.
276 | * Repeat for $$e$$ epochs.
277 |
278 | You can also update the weights on each record instead of averaging the weight steps after going through all the records.
279 |
280 | Remember that we're using the sigmoid for the activation function, $$f(h) = 1/(1+e^{-h})$$
281 |
282 | And the gradient of the sigmoid is $$f'(h) = f(h) (1 - f(h))$$ where $$h$$ is the input to the output unit, $$h = \sum_i w_i x_i$$
283 |
284 | The following sample code implements gradient descent of a single hidden layer neural network that uses the **sigmoid** as the **activation function** for its output:
285 | ```python
286 | for e in range(epochs):
287 |     del_w_input_hidden = np.zeros(weights_input_hidden.shape)
288 |     del_w_hidden_output = np.zeros(weights_hidden_output.shape)
289 |     for x, y in zip(features.values, targets):
290 |         ## Forward pass ##
291 |         # Calculate the output
292 |         hidden_input = np.dot(x, weights_input_hidden)
293 |         hidden_output = sigmoid(hidden_input)
294 |
295 |         output = sigmoid(np.dot(hidden_output,
296 |                                 weights_hidden_output))
297 |
298 |         ## Backward pass ##
299 |         # Calculate the network's prediction error
300 |         error = y - output
301 |
302 |         # Calculate error term for the output unit
303 |         output_error_term = error * output * (1 - output)
304 |
305 |         ## propagate errors to hidden layer
306 |
307 |         # Calculate the hidden layer's contribution to the error
308 |         hidden_error = np.dot(output_error_term, weights_hidden_output)
309 |
310 |         # Calculate the error term for the hidden layer
311 |         hidden_error_term = hidden_error * hidden_output * (1 - hidden_output)
312 |
313 |         # Update the change in weights
314 |         del_w_hidden_output += output_error_term * hidden_output
315 |         del_w_input_hidden += hidden_error_term * x[:, None]
316 |
317 |     # Update weights
318 |     weights_input_hidden += learnrate * del_w_input_hidden / n_records
319 |     weights_hidden_output += learnrate * del_w_hidden_output / n_records
320 | ```
321 |
322 | ## Neural Networks problems
323 | ### Overfitting and underfitting
324 | **Overfitting** is like trying to kill a fly with a bazooka. We are trying a solution that is overcomplicated for the problem at hand. See what happens if we make a too-specific classification and then try to add a data point that was not there, like the purple dog in our image.
325 | 
326 | Overfitting can also be seen as studying too much: memorizing every letter of the lesson but not really understanding the information, so you can't answer anything not found in the book.
327 |
328 | **Underfitting** is trying to kill Godzilla with a fly swatter. We are trying a solution that is too simple and won't do the job. It's also called **error due to bias**. In the following image we can see what happens if we make a too-general classification. The cat would also be classified as not an animal although it is one.
329 | 
330 | Underfitting can also be seen as not studying enough for an exam and failing a test.
331 |
332 | ### Model complexity graph
333 | In order to validate the model we use two sets, a training one and a testing one. The first one is used to train the network and the second one to validate the results.
334 | As the neural network keeps training, the error on the training set gets lower and lower. The error on the testing set, on the other hand, starts to increase when the model starts to overfit. In order not to overfit the model we should stop the iterations when the testing error starts to increase; this is called **early stopping**.
335 |
336 | We can see it graphically in the model complexity graph.
337 | 
338 |
339 | ### Dropout
340 | If training only exercises some nodes and not others, the nodes that get all the work end up with very large weights and dominate the training. In order to solve this problem, we can deactivate some nodes in each run so they are not used. When that's done, the remaining nodes have to pick up the slack and take a bigger part in the training.
341 |
342 | To drop the nodes we give the algorithm a parameter: the probability that each node gets dropped in a particular run. On average, each node will get the same treatment.
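
A rough NumPy sketch of that idea (this illustrates the concept only; real frameworks implement dropout for us):

```python
import numpy as np

keep_prob = 0.8                                    # probability that a node is kept in this run
hidden_outputs = np.array([0.2, 0.7, 0.5, 0.9])

mask = np.random.rand(hidden_outputs.size) < keep_prob
dropped = hidden_outputs * mask / keep_prob        # dropped nodes output 0; the rest are scaled up
print(dropped)                                     # so the expected output stays the same
```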
343 |
344 | ### Common problems
345 | When we were talking about [gradient descent](#gradient-descent) we said that we could get stuck in a local minimum instead of the absolute one. The error function cannot distinguish between the two.
346 |
347 | Another thing that can happen is the **vanishing gradient**. Taking a look at the **sigmoid** function we can see that when the values are very high or very low the function is almost horizontal. That gives us an almost 0 gradient, so each step would be really small and we could run out of steps without arriving at the point that minimizes the error. In order to solve this problem, we can use another **activation function**. One that's used a lot is the **hyperbolic tangent function**:
348 |
349 | $$$ tanh(x) = \frac{e^x - e^{-x}}{e^x + e^{-x}} $$$
350 |
351 | Since its range is between $$-1$$ and $$1$$ the derivatives are larger.
352 |
353 | Another commonly used function is the **Rectified Linear Unit** or **ReLU** in short.
354 |
355 | $$$ relu(x) = \begin{cases}x &\text{if } x\geqslant 0 \\ 0 &\text{if } x<0\end{cases}$$$
356 |
357 | In order to avoid doing a lot of computations and using tons of memory for a single step we'll use **stochastic gradient descent**. If the data is evenly distributed, a small subset of it gives us a pretty good idea of what the gradient would be. What we do is split all the data into several batches, run each batch through the neural network, calculate the error and its gradient and back-propagate to update the weights. Each step is less accurate than using all the data, but it's much better to take a bunch of slightly inaccurate steps than to take a single good one.
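
A sketch of the batching part only (`features` and `targets` here are fake data; the error, gradient and weight update are the ones described above):

```python
import numpy as np

features = np.random.randn(100, 3)               # fake data just for the sketch
targets = np.random.randint(0, 2, size=100)
batch_size = 16

indices = np.random.permutation(len(features))   # shuffle so every batch looks like the whole set
for start in range(0, len(features), batch_size):
    batch = indices[start:start + batch_size]
    x_batch, y_batch = features[batch], targets[batch]
    # ... forward pass, error, gradient and weight update using only this batch ...
```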
358 |
359 | If the learning rate is too big, you're taking huge steps, which could be fast but you might miss the minimum. Taking small steps with a small learning rate guarantees finding the minimum but might make the model really slow. The best learning rates are those which decrease as the model gets closer to a solution. If the gradient is steep, we take long steps; if it's flat, we take small steps.
360 |
361 | One way to avoid getting stuck in local minima is to use **random restarts**. We start the process from different places and do gradient descent from all of them. This doesn't guarantee finding the absolute minimum but increases its probability. Another way of doing it is using **momentum**. The idea is to take each step with enough determination that if you get stuck in a local minimum, you can jump over it and look for another minimum. In order to compute the momentum we can take a weighted average of the last steps. Given a constant $$\beta$$ between $$0$$ and $$1$$, the formula is as follows:
362 |
363 | $$$ STEP(n) \gets STEP(n) + \beta STEP(n-1) + \beta^2 STEP(n-2) + ... $$$
364 |
365 | This way, the steps that gradient descent took long ago matter less than the ones that happened recently.
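
A small sketch of a momentum update, where the running `velocity` plays the role of the weighted average of the previous steps (names are mine):

```python
import numpy as np

def momentum_step(weights, gradient, velocity, learnrate=0.01, beta=0.9):
    """One gradient descent step with momentum."""
    velocity = beta * velocity + learnrate * gradient   # geometrically decaying sum of past steps
    weights = weights - velocity
    return weights, velocity

w, v = np.zeros(3), np.zeros(3)
w, v = momentum_step(w, np.array([0.5, -0.2, 0.1]), v)
```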
366 |
367 | ## Sentiment Analysis
368 |
369 | ## Keras
370 | [Keras](https://keras.io/) is a high-level neural networks API written in Python that can run on top of [TensorFlow](https://github.com/tensorflow/tensorflow).
371 |
372 | ## Concepts
373 |
374 | * *Sequential Model:* The [keras.models.Sequential](https://keras.io/models/sequential/) class is a wrapper for a neural network model that treats the network as a sequence of layers.
375 | ```python
376 | from keras.models import Sequential
377 |
378 | #Create the Sequential model
379 | model = Sequential()
380 | ```
381 | * *Layers:* The [keras.layers](https://keras.io/layers/about-keras-layers/) classes provide common methods for a variety of standard neural network layers. These layers can be added to a model with the `add()` method. The shape of the first layer must be specified, but Keras will infer the shape of all other layers automatically.
382 |
383 | ## First model
384 | A single hidden layer model might look like this:
385 | ```python
386 | import numpy as np
387 | from keras.models import Sequential
388 | from keras.layers.core import Dense, Activation
389 |
390 | # X has shape (num_rows, num_cols), where the training data are stored
391 | # as row vectors
392 | X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
393 |
394 | # y must have an output vector for each input vector
395 | y = np.array([[0], [0], [0], [1]], dtype=np.float32)
396 |
397 | # Create the Sequential model
398 | model = Sequential()
399 |
400 | # 1st Layer - Add an input layer of 32 nodes with the same input shape as
401 | # the training samples in X
402 | model.add(Dense(32, input_dim=X.shape[1]))
403 |
404 | # Add a softmax activation layer
405 | model.add(Activation('softmax'))
406 |
407 | # 2nd Layer - Add a fully connected output layer
408 | model.add(Dense(1))
409 |
410 | # Add a sigmoid activation layer
411 | model.add(Activation('sigmoid'))
412 | ```
413 |
414 | Notice that the first hidden layer creates 32 nodes which expect to receive 2-element vectors as inputs. We can also see that the output layer is just a single node of dimension 1. The activation functions are added as individual layers using the [Activation](https://keras.io/activations/) class.
415 |
416 | Each layer takes the output of the previous one, computes what it needs to compute, and pipes it through the next layer.
417 |
418 | After adding all the layers, the model needs to be compiled before it can be run. Compiling a model calls the backend where it will be run, binds the [*optimizer*](https://keras.io/optimizers/), [*loss function*](https://keras.io/losses/), [*metrics*](https://keras.io/metrics/) and other parameters required before the model can be run on the input data.
419 |
420 | ```python
421 | model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
422 | ```
423 |
424 | We can use ``model.summary()`` to see the resulting model architecture, ``model.fit(X, y, nb_epoch=1000, verbose=0)`` to train the model and ``model.evaluate()`` to evaluate it.
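
Putting those calls together for the model defined above (note that `nb_epoch` is the argument name of the Keras 1.x API used in these notes; later versions renamed it to `epochs`):

```python
model.summary()                            # print the resulting model architecture
model.fit(X, y, nb_epoch=1000, verbose=0)  # train on the X, y arrays defined earlier
score = model.evaluate(X, y)               # returns the loss and the metrics passed to compile()
```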
425 |
426 | # Projects
427 | ## Predicting bike sharing
428 | A project with a Neural Network built from scratch to predict the number of bikeshare users on any given day.
429 |
430 | You can find the implementation, including a Jupyter Notebook, [here](https://github.com/ibesora/udacity-deeplearning-notes/tree/master/projects/first-neural-network)
431 |
432 | # Resources
433 | ## Links
434 | ### Repositories
435 | * [Fast style transfer repo](https://github.com/lengstrom/fast-style-transfer)
436 | * [DeepTraffic](https://selfdrivingcars.mit.edu/deeptraffic/)
437 | * [Flappy bird repo](https://github.com/yenchenlin/DeepLearningFlappyBird)
438 |
439 | ### Readings
440 | * [Yes, you should understand backprop](https://medium.com/@karpathy/yes-you-should-understand-backprop-e2f06eab496b)
441 | * [Stanford's CS231n course lecture](https://www.youtube.com/watch?v=59Hbtz7XgjM)
442 |
443 | ## Books to read
444 | * [Grokking Deep Learning](https://www.manning.com/books/grokking-deep-learning)
445 | * [Neural Networks and Deep Learning](http://neuralnetworksanddeeplearning.com/)
446 | * [The deep learning text book](http://www.deeplearningbook.org/)
447 |
448 | ## Websites
449 | * [Keras](https://keras.io)
--------------------------------------------------------------------------------
/projects/first-neural-network/Bike-Sharing-Dataset/day.csv:
--------------------------------------------------------------------------------
1 | instant,dteday,season,yr,mnth,holiday,weekday,workingday,weathersit,temp,atemp,hum,windspeed,casual,registered,cnt
2 | 1,2011-01-01,1,0,1,0,6,0,2,0.344167,0.363625,0.805833,0.160446,331,654,985
3 | 2,2011-01-02,1,0,1,0,0,0,2,0.363478,0.353739,0.696087,0.248539,131,670,801
4 | 3,2011-01-03,1,0,1,0,1,1,1,0.196364,0.189405,0.437273,0.248309,120,1229,1349
5 | 4,2011-01-04,1,0,1,0,2,1,1,0.2,0.212122,0.590435,0.160296,108,1454,1562
6 | 5,2011-01-05,1,0,1,0,3,1,1,0.226957,0.22927,0.436957,0.1869,82,1518,1600
7 | 6,2011-01-06,1,0,1,0,4,1,1,0.204348,0.233209,0.518261,0.0895652,88,1518,1606
8 | 7,2011-01-07,1,0,1,0,5,1,2,0.196522,0.208839,0.498696,0.168726,148,1362,1510
9 | 8,2011-01-08,1,0,1,0,6,0,2,0.165,0.162254,0.535833,0.266804,68,891,959
10 | 9,2011-01-09,1,0,1,0,0,0,1,0.138333,0.116175,0.434167,0.36195,54,768,822
11 | 10,2011-01-10,1,0,1,0,1,1,1,0.150833,0.150888,0.482917,0.223267,41,1280,1321
12 | 11,2011-01-11,1,0,1,0,2,1,2,0.169091,0.191464,0.686364,0.122132,43,1220,1263
13 | 12,2011-01-12,1,0,1,0,3,1,1,0.172727,0.160473,0.599545,0.304627,25,1137,1162
14 | 13,2011-01-13,1,0,1,0,4,1,1,0.165,0.150883,0.470417,0.301,38,1368,1406
15 | 14,2011-01-14,1,0,1,0,5,1,1,0.16087,0.188413,0.537826,0.126548,54,1367,1421
16 | 15,2011-01-15,1,0,1,0,6,0,2,0.233333,0.248112,0.49875,0.157963,222,1026,1248
17 | 16,2011-01-16,1,0,1,0,0,0,1,0.231667,0.234217,0.48375,0.188433,251,953,1204
18 | 17,2011-01-17,1,0,1,1,1,0,2,0.175833,0.176771,0.5375,0.194017,117,883,1000
19 | 18,2011-01-18,1,0,1,0,2,1,2,0.216667,0.232333,0.861667,0.146775,9,674,683
20 | 19,2011-01-19,1,0,1,0,3,1,2,0.292174,0.298422,0.741739,0.208317,78,1572,1650
21 | 20,2011-01-20,1,0,1,0,4,1,2,0.261667,0.25505,0.538333,0.195904,83,1844,1927
22 | 21,2011-01-21,1,0,1,0,5,1,1,0.1775,0.157833,0.457083,0.353242,75,1468,1543
23 | 22,2011-01-22,1,0,1,0,6,0,1,0.0591304,0.0790696,0.4,0.17197,93,888,981
24 | 23,2011-01-23,1,0,1,0,0,0,1,0.0965217,0.0988391,0.436522,0.2466,150,836,986
25 | 24,2011-01-24,1,0,1,0,1,1,1,0.0973913,0.11793,0.491739,0.15833,86,1330,1416
26 | 25,2011-01-25,1,0,1,0,2,1,2,0.223478,0.234526,0.616957,0.129796,186,1799,1985
27 | 26,2011-01-26,1,0,1,0,3,1,3,0.2175,0.2036,0.8625,0.29385,34,472,506
28 | 27,2011-01-27,1,0,1,0,4,1,1,0.195,0.2197,0.6875,0.113837,15,416,431
29 | 28,2011-01-28,1,0,1,0,5,1,2,0.203478,0.223317,0.793043,0.1233,38,1129,1167
30 | 29,2011-01-29,1,0,1,0,6,0,1,0.196522,0.212126,0.651739,0.145365,123,975,1098
31 | 30,2011-01-30,1,0,1,0,0,0,1,0.216522,0.250322,0.722174,0.0739826,140,956,1096
32 | 31,2011-01-31,1,0,1,0,1,1,2,0.180833,0.18625,0.60375,0.187192,42,1459,1501
33 | 32,2011-02-01,1,0,2,0,2,1,2,0.192174,0.23453,0.829565,0.053213,47,1313,1360
34 | 33,2011-02-02,1,0,2,0,3,1,2,0.26,0.254417,0.775417,0.264308,72,1454,1526
35 | 34,2011-02-03,1,0,2,0,4,1,1,0.186957,0.177878,0.437826,0.277752,61,1489,1550
36 | 35,2011-02-04,1,0,2,0,5,1,2,0.211304,0.228587,0.585217,0.127839,88,1620,1708
37 | 36,2011-02-05,1,0,2,0,6,0,2,0.233333,0.243058,0.929167,0.161079,100,905,1005
38 | 37,2011-02-06,1,0,2,0,0,0,1,0.285833,0.291671,0.568333,0.1418,354,1269,1623
39 | 38,2011-02-07,1,0,2,0,1,1,1,0.271667,0.303658,0.738333,0.0454083,120,1592,1712
40 | 39,2011-02-08,1,0,2,0,2,1,1,0.220833,0.198246,0.537917,0.36195,64,1466,1530
41 | 40,2011-02-09,1,0,2,0,3,1,2,0.134783,0.144283,0.494783,0.188839,53,1552,1605
42 | 41,2011-02-10,1,0,2,0,4,1,1,0.144348,0.149548,0.437391,0.221935,47,1491,1538
43 | 42,2011-02-11,1,0,2,0,5,1,1,0.189091,0.213509,0.506364,0.10855,149,1597,1746
44 | 43,2011-02-12,1,0,2,0,6,0,1,0.2225,0.232954,0.544167,0.203367,288,1184,1472
45 | 44,2011-02-13,1,0,2,0,0,0,1,0.316522,0.324113,0.457391,0.260883,397,1192,1589
46 | 45,2011-02-14,1,0,2,0,1,1,1,0.415,0.39835,0.375833,0.417908,208,1705,1913
47 | 46,2011-02-15,1,0,2,0,2,1,1,0.266087,0.254274,0.314348,0.291374,140,1675,1815
48 | 47,2011-02-16,1,0,2,0,3,1,1,0.318261,0.3162,0.423478,0.251791,218,1897,2115
49 | 48,2011-02-17,1,0,2,0,4,1,1,0.435833,0.428658,0.505,0.230104,259,2216,2475
50 | 49,2011-02-18,1,0,2,0,5,1,1,0.521667,0.511983,0.516667,0.264925,579,2348,2927
51 | 50,2011-02-19,1,0,2,0,6,0,1,0.399167,0.391404,0.187917,0.507463,532,1103,1635
52 | 51,2011-02-20,1,0,2,0,0,0,1,0.285217,0.27733,0.407826,0.223235,639,1173,1812
53 | 52,2011-02-21,1,0,2,1,1,0,2,0.303333,0.284075,0.605,0.307846,195,912,1107
54 | 53,2011-02-22,1,0,2,0,2,1,1,0.182222,0.186033,0.577778,0.195683,74,1376,1450
55 | 54,2011-02-23,1,0,2,0,3,1,1,0.221739,0.245717,0.423043,0.094113,139,1778,1917
56 | 55,2011-02-24,1,0,2,0,4,1,2,0.295652,0.289191,0.697391,0.250496,100,1707,1807
57 | 56,2011-02-25,1,0,2,0,5,1,2,0.364348,0.350461,0.712174,0.346539,120,1341,1461
58 | 57,2011-02-26,1,0,2,0,6,0,1,0.2825,0.282192,0.537917,0.186571,424,1545,1969
59 | 58,2011-02-27,1,0,2,0,0,0,1,0.343478,0.351109,0.68,0.125248,694,1708,2402
60 | 59,2011-02-28,1,0,2,0,1,1,2,0.407273,0.400118,0.876364,0.289686,81,1365,1446
61 | 60,2011-03-01,1,0,3,0,2,1,1,0.266667,0.263879,0.535,0.216425,137,1714,1851
62 | 61,2011-03-02,1,0,3,0,3,1,1,0.335,0.320071,0.449583,0.307833,231,1903,2134
63 | 62,2011-03-03,1,0,3,0,4,1,1,0.198333,0.200133,0.318333,0.225754,123,1562,1685
64 | 63,2011-03-04,1,0,3,0,5,1,2,0.261667,0.255679,0.610417,0.203346,214,1730,1944
65 | 64,2011-03-05,1,0,3,0,6,0,2,0.384167,0.378779,0.789167,0.251871,640,1437,2077
66 | 65,2011-03-06,1,0,3,0,0,0,2,0.376522,0.366252,0.948261,0.343287,114,491,605
67 | 66,2011-03-07,1,0,3,0,1,1,1,0.261739,0.238461,0.551304,0.341352,244,1628,1872
68 | 67,2011-03-08,1,0,3,0,2,1,1,0.2925,0.3024,0.420833,0.12065,316,1817,2133
69 | 68,2011-03-09,1,0,3,0,3,1,2,0.295833,0.286608,0.775417,0.22015,191,1700,1891
70 | 69,2011-03-10,1,0,3,0,4,1,3,0.389091,0.385668,0,0.261877,46,577,623
71 | 70,2011-03-11,1,0,3,0,5,1,2,0.316522,0.305,0.649565,0.23297,247,1730,1977
72 | 71,2011-03-12,1,0,3,0,6,0,1,0.329167,0.32575,0.594583,0.220775,724,1408,2132
73 | 72,2011-03-13,1,0,3,0,0,0,1,0.384348,0.380091,0.527391,0.270604,982,1435,2417
74 | 73,2011-03-14,1,0,3,0,1,1,1,0.325217,0.332,0.496957,0.136926,359,1687,2046
75 | 74,2011-03-15,1,0,3,0,2,1,2,0.317391,0.318178,0.655652,0.184309,289,1767,2056
76 | 75,2011-03-16,1,0,3,0,3,1,2,0.365217,0.36693,0.776522,0.203117,321,1871,2192
77 | 76,2011-03-17,1,0,3,0,4,1,1,0.415,0.410333,0.602917,0.209579,424,2320,2744
78 | 77,2011-03-18,1,0,3,0,5,1,1,0.54,0.527009,0.525217,0.231017,884,2355,3239
79 | 78,2011-03-19,1,0,3,0,6,0,1,0.4725,0.466525,0.379167,0.368167,1424,1693,3117
80 | 79,2011-03-20,1,0,3,0,0,0,1,0.3325,0.32575,0.47375,0.207721,1047,1424,2471
81 | 80,2011-03-21,2,0,3,0,1,1,2,0.430435,0.409735,0.737391,0.288783,401,1676,2077
82 | 81,2011-03-22,2,0,3,0,2,1,1,0.441667,0.440642,0.624583,0.22575,460,2243,2703
83 | 82,2011-03-23,2,0,3,0,3,1,2,0.346957,0.337939,0.839565,0.234261,203,1918,2121
84 | 83,2011-03-24,2,0,3,0,4,1,2,0.285,0.270833,0.805833,0.243787,166,1699,1865
85 | 84,2011-03-25,2,0,3,0,5,1,1,0.264167,0.256312,0.495,0.230725,300,1910,2210
86 | 85,2011-03-26,2,0,3,0,6,0,1,0.265833,0.257571,0.394167,0.209571,981,1515,2496
87 | 86,2011-03-27,2,0,3,0,0,0,2,0.253043,0.250339,0.493913,0.1843,472,1221,1693
88 | 87,2011-03-28,2,0,3,0,1,1,1,0.264348,0.257574,0.302174,0.212204,222,1806,2028
89 | 88,2011-03-29,2,0,3,0,2,1,1,0.3025,0.292908,0.314167,0.226996,317,2108,2425
90 | 89,2011-03-30,2,0,3,0,3,1,2,0.3,0.29735,0.646667,0.172888,168,1368,1536
91 | 90,2011-03-31,2,0,3,0,4,1,3,0.268333,0.257575,0.918333,0.217646,179,1506,1685
92 | 91,2011-04-01,2,0,4,0,5,1,2,0.3,0.283454,0.68625,0.258708,307,1920,2227
93 | 92,2011-04-02,2,0,4,0,6,0,2,0.315,0.315637,0.65375,0.197146,898,1354,2252
94 | 93,2011-04-03,2,0,4,0,0,0,1,0.378333,0.378767,0.48,0.182213,1651,1598,3249
95 | 94,2011-04-04,2,0,4,0,1,1,1,0.573333,0.542929,0.42625,0.385571,734,2381,3115
96 | 95,2011-04-05,2,0,4,0,2,1,2,0.414167,0.39835,0.642083,0.388067,167,1628,1795
97 | 96,2011-04-06,2,0,4,0,3,1,1,0.390833,0.387608,0.470833,0.263063,413,2395,2808
98 | 97,2011-04-07,2,0,4,0,4,1,1,0.4375,0.433696,0.602917,0.162312,571,2570,3141
99 | 98,2011-04-08,2,0,4,0,5,1,2,0.335833,0.324479,0.83625,0.226992,172,1299,1471
100 | 99,2011-04-09,2,0,4,0,6,0,2,0.3425,0.341529,0.8775,0.133083,879,1576,2455
101 | 100,2011-04-10,2,0,4,0,0,0,2,0.426667,0.426737,0.8575,0.146767,1188,1707,2895
102 | 101,2011-04-11,2,0,4,0,1,1,2,0.595652,0.565217,0.716956,0.324474,855,2493,3348
103 | 102,2011-04-12,2,0,4,0,2,1,2,0.5025,0.493054,0.739167,0.274879,257,1777,2034
104 | 103,2011-04-13,2,0,4,0,3,1,2,0.4125,0.417283,0.819167,0.250617,209,1953,2162
105 | 104,2011-04-14,2,0,4,0,4,1,1,0.4675,0.462742,0.540417,0.1107,529,2738,3267
106 | 105,2011-04-15,2,0,4,1,5,0,1,0.446667,0.441913,0.67125,0.226375,642,2484,3126
107 | 106,2011-04-16,2,0,4,0,6,0,3,0.430833,0.425492,0.888333,0.340808,121,674,795
108 | 107,2011-04-17,2,0,4,0,0,0,1,0.456667,0.445696,0.479583,0.303496,1558,2186,3744
109 | 108,2011-04-18,2,0,4,0,1,1,1,0.5125,0.503146,0.5425,0.163567,669,2760,3429
110 | 109,2011-04-19,2,0,4,0,2,1,2,0.505833,0.489258,0.665833,0.157971,409,2795,3204
111 | 110,2011-04-20,2,0,4,0,3,1,1,0.595,0.564392,0.614167,0.241925,613,3331,3944
112 | 111,2011-04-21,2,0,4,0,4,1,1,0.459167,0.453892,0.407083,0.325258,745,3444,4189
113 | 112,2011-04-22,2,0,4,0,5,1,2,0.336667,0.321954,0.729583,0.219521,177,1506,1683
114 | 113,2011-04-23,2,0,4,0,6,0,2,0.46,0.450121,0.887917,0.230725,1462,2574,4036
115 | 114,2011-04-24,2,0,4,0,0,0,2,0.581667,0.551763,0.810833,0.192175,1710,2481,4191
116 | 115,2011-04-25,2,0,4,0,1,1,1,0.606667,0.5745,0.776667,0.185333,773,3300,4073
117 | 116,2011-04-26,2,0,4,0,2,1,1,0.631667,0.594083,0.729167,0.3265,678,3722,4400
118 | 117,2011-04-27,2,0,4,0,3,1,2,0.62,0.575142,0.835417,0.3122,547,3325,3872
119 | 118,2011-04-28,2,0,4,0,4,1,2,0.6175,0.578929,0.700833,0.320908,569,3489,4058
120 | 119,2011-04-29,2,0,4,0,5,1,1,0.51,0.497463,0.457083,0.240063,878,3717,4595
121 | 120,2011-04-30,2,0,4,0,6,0,1,0.4725,0.464021,0.503333,0.235075,1965,3347,5312
122 | 121,2011-05-01,2,0,5,0,0,0,2,0.451667,0.448204,0.762083,0.106354,1138,2213,3351
123 | 122,2011-05-02,2,0,5,0,1,1,2,0.549167,0.532833,0.73,0.183454,847,3554,4401
124 | 123,2011-05-03,2,0,5,0,2,1,2,0.616667,0.582079,0.697083,0.342667,603,3848,4451
125 | 124,2011-05-04,2,0,5,0,3,1,2,0.414167,0.40465,0.737083,0.328996,255,2378,2633
126 | 125,2011-05-05,2,0,5,0,4,1,1,0.459167,0.441917,0.444167,0.295392,614,3819,4433
127 | 126,2011-05-06,2,0,5,0,5,1,1,0.479167,0.474117,0.59,0.228246,894,3714,4608
128 | 127,2011-05-07,2,0,5,0,6,0,1,0.52,0.512621,0.54125,0.16045,1612,3102,4714
129 | 128,2011-05-08,2,0,5,0,0,0,1,0.528333,0.518933,0.631667,0.0746375,1401,2932,4333
130 | 129,2011-05-09,2,0,5,0,1,1,1,0.5325,0.525246,0.58875,0.176,664,3698,4362
131 | 130,2011-05-10,2,0,5,0,2,1,1,0.5325,0.522721,0.489167,0.115671,694,4109,4803
132 | 131,2011-05-11,2,0,5,0,3,1,1,0.5425,0.5284,0.632917,0.120642,550,3632,4182
133 | 132,2011-05-12,2,0,5,0,4,1,1,0.535,0.523363,0.7475,0.189667,695,4169,4864
134 | 133,2011-05-13,2,0,5,0,5,1,2,0.5125,0.4943,0.863333,0.179725,692,3413,4105
135 | 134,2011-05-14,2,0,5,0,6,0,2,0.520833,0.500629,0.9225,0.13495,902,2507,3409
136 | 135,2011-05-15,2,0,5,0,0,0,2,0.5625,0.536,0.867083,0.152979,1582,2971,4553
137 | 136,2011-05-16,2,0,5,0,1,1,1,0.5775,0.550512,0.787917,0.126871,773,3185,3958
138 | 137,2011-05-17,2,0,5,0,2,1,2,0.561667,0.538529,0.837917,0.277354,678,3445,4123
139 | 138,2011-05-18,2,0,5,0,3,1,2,0.55,0.527158,0.87,0.201492,536,3319,3855
140 | 139,2011-05-19,2,0,5,0,4,1,2,0.530833,0.510742,0.829583,0.108213,735,3840,4575
141 | 140,2011-05-20,2,0,5,0,5,1,1,0.536667,0.529042,0.719583,0.125013,909,4008,4917
142 | 141,2011-05-21,2,0,5,0,6,0,1,0.6025,0.571975,0.626667,0.12065,2258,3547,5805
143 | 142,2011-05-22,2,0,5,0,0,0,1,0.604167,0.5745,0.749583,0.148008,1576,3084,4660
144 | 143,2011-05-23,2,0,5,0,1,1,2,0.631667,0.590296,0.81,0.233842,836,3438,4274
145 | 144,2011-05-24,2,0,5,0,2,1,2,0.66,0.604813,0.740833,0.207092,659,3833,4492
146 | 145,2011-05-25,2,0,5,0,3,1,1,0.660833,0.615542,0.69625,0.154233,740,4238,4978
147 | 146,2011-05-26,2,0,5,0,4,1,1,0.708333,0.654688,0.6775,0.199642,758,3919,4677
148 | 147,2011-05-27,2,0,5,0,5,1,1,0.681667,0.637008,0.65375,0.240679,871,3808,4679
149 | 148,2011-05-28,2,0,5,0,6,0,1,0.655833,0.612379,0.729583,0.230092,2001,2757,4758
150 | 149,2011-05-29,2,0,5,0,0,0,1,0.6675,0.61555,0.81875,0.213938,2355,2433,4788
151 | 150,2011-05-30,2,0,5,1,1,0,1,0.733333,0.671092,0.685,0.131225,1549,2549,4098
152 | 151,2011-05-31,2,0,5,0,2,1,1,0.775,0.725383,0.636667,0.111329,673,3309,3982
153 | 152,2011-06-01,2,0,6,0,3,1,2,0.764167,0.720967,0.677083,0.207092,513,3461,3974
154 | 153,2011-06-02,2,0,6,0,4,1,1,0.715,0.643942,0.305,0.292287,736,4232,4968
155 | 154,2011-06-03,2,0,6,0,5,1,1,0.62,0.587133,0.354167,0.253121,898,4414,5312
156 | 155,2011-06-04,2,0,6,0,6,0,1,0.635,0.594696,0.45625,0.123142,1869,3473,5342
157 | 156,2011-06-05,2,0,6,0,0,0,2,0.648333,0.616804,0.6525,0.138692,1685,3221,4906
158 | 157,2011-06-06,2,0,6,0,1,1,1,0.678333,0.621858,0.6,0.121896,673,3875,4548
159 | 158,2011-06-07,2,0,6,0,2,1,1,0.7075,0.65595,0.597917,0.187808,763,4070,4833
160 | 159,2011-06-08,2,0,6,0,3,1,1,0.775833,0.727279,0.622083,0.136817,676,3725,4401
161 | 160,2011-06-09,2,0,6,0,4,1,2,0.808333,0.757579,0.568333,0.149883,563,3352,3915
162 | 161,2011-06-10,2,0,6,0,5,1,1,0.755,0.703292,0.605,0.140554,815,3771,4586
163 | 162,2011-06-11,2,0,6,0,6,0,1,0.725,0.678038,0.654583,0.15485,1729,3237,4966
164 | 163,2011-06-12,2,0,6,0,0,0,1,0.6925,0.643325,0.747917,0.163567,1467,2993,4460
165 | 164,2011-06-13,2,0,6,0,1,1,1,0.635,0.601654,0.494583,0.30535,863,4157,5020
166 | 165,2011-06-14,2,0,6,0,2,1,1,0.604167,0.591546,0.507083,0.269283,727,4164,4891
167 | 166,2011-06-15,2,0,6,0,3,1,1,0.626667,0.587754,0.471667,0.167912,769,4411,5180
168 | 167,2011-06-16,2,0,6,0,4,1,2,0.628333,0.595346,0.688333,0.206471,545,3222,3767
169 | 168,2011-06-17,2,0,6,0,5,1,1,0.649167,0.600383,0.735833,0.143029,863,3981,4844
170 | 169,2011-06-18,2,0,6,0,6,0,1,0.696667,0.643954,0.670417,0.119408,1807,3312,5119
171 | 170,2011-06-19,2,0,6,0,0,0,2,0.699167,0.645846,0.666667,0.102,1639,3105,4744
172 | 171,2011-06-20,2,0,6,0,1,1,2,0.635,0.595346,0.74625,0.155475,699,3311,4010
173 | 172,2011-06-21,3,0,6,0,2,1,2,0.680833,0.637646,0.770417,0.171025,774,4061,4835
174 | 173,2011-06-22,3,0,6,0,3,1,1,0.733333,0.693829,0.7075,0.172262,661,3846,4507
175 | 174,2011-06-23,3,0,6,0,4,1,2,0.728333,0.693833,0.703333,0.238804,746,4044,4790
176 | 175,2011-06-24,3,0,6,0,5,1,1,0.724167,0.656583,0.573333,0.222025,969,4022,4991
177 | 176,2011-06-25,3,0,6,0,6,0,1,0.695,0.643313,0.483333,0.209571,1782,3420,5202
178 | 177,2011-06-26,3,0,6,0,0,0,1,0.68,0.637629,0.513333,0.0945333,1920,3385,5305
179 | 178,2011-06-27,3,0,6,0,1,1,2,0.6825,0.637004,0.658333,0.107588,854,3854,4708
180 | 179,2011-06-28,3,0,6,0,2,1,1,0.744167,0.692558,0.634167,0.144283,732,3916,4648
181 | 180,2011-06-29,3,0,6,0,3,1,1,0.728333,0.654688,0.497917,0.261821,848,4377,5225
182 | 181,2011-06-30,3,0,6,0,4,1,1,0.696667,0.637008,0.434167,0.185312,1027,4488,5515
183 | 182,2011-07-01,3,0,7,0,5,1,1,0.7225,0.652162,0.39625,0.102608,1246,4116,5362
184 | 183,2011-07-02,3,0,7,0,6,0,1,0.738333,0.667308,0.444583,0.115062,2204,2915,5119
185 | 184,2011-07-03,3,0,7,0,0,0,2,0.716667,0.668575,0.6825,0.228858,2282,2367,4649
186 | 185,2011-07-04,3,0,7,1,1,0,2,0.726667,0.665417,0.637917,0.0814792,3065,2978,6043
187 | 186,2011-07-05,3,0,7,0,2,1,1,0.746667,0.696338,0.590417,0.126258,1031,3634,4665
188 | 187,2011-07-06,3,0,7,0,3,1,1,0.72,0.685633,0.743333,0.149883,784,3845,4629
189 | 188,2011-07-07,3,0,7,0,4,1,1,0.75,0.686871,0.65125,0.1592,754,3838,4592
190 | 189,2011-07-08,3,0,7,0,5,1,2,0.709167,0.670483,0.757917,0.225129,692,3348,4040
191 | 190,2011-07-09,3,0,7,0,6,0,1,0.733333,0.664158,0.609167,0.167912,1988,3348,5336
192 | 191,2011-07-10,3,0,7,0,0,0,1,0.7475,0.690025,0.578333,0.183471,1743,3138,4881
193 | 192,2011-07-11,3,0,7,0,1,1,1,0.7625,0.729804,0.635833,0.282337,723,3363,4086
194 | 193,2011-07-12,3,0,7,0,2,1,1,0.794167,0.739275,0.559167,0.200254,662,3596,4258
195 | 194,2011-07-13,3,0,7,0,3,1,1,0.746667,0.689404,0.631667,0.146133,748,3594,4342
196 | 195,2011-07-14,3,0,7,0,4,1,1,0.680833,0.635104,0.47625,0.240667,888,4196,5084
197 | 196,2011-07-15,3,0,7,0,5,1,1,0.663333,0.624371,0.59125,0.182833,1318,4220,5538
198 | 197,2011-07-16,3,0,7,0,6,0,1,0.686667,0.638263,0.585,0.208342,2418,3505,5923
199 | 198,2011-07-17,3,0,7,0,0,0,1,0.719167,0.669833,0.604167,0.245033,2006,3296,5302
200 | 199,2011-07-18,3,0,7,0,1,1,1,0.746667,0.703925,0.65125,0.215804,841,3617,4458
201 | 200,2011-07-19,3,0,7,0,2,1,1,0.776667,0.747479,0.650417,0.1306,752,3789,4541
202 | 201,2011-07-20,3,0,7,0,3,1,1,0.768333,0.74685,0.707083,0.113817,644,3688,4332
203 | 202,2011-07-21,3,0,7,0,4,1,2,0.815,0.826371,0.69125,0.222021,632,3152,3784
204 | 203,2011-07-22,3,0,7,0,5,1,1,0.848333,0.840896,0.580417,0.1331,562,2825,3387
205 | 204,2011-07-23,3,0,7,0,6,0,1,0.849167,0.804287,0.5,0.131221,987,2298,3285
206 | 205,2011-07-24,3,0,7,0,0,0,1,0.83,0.794829,0.550833,0.169171,1050,2556,3606
207 | 206,2011-07-25,3,0,7,0,1,1,1,0.743333,0.720958,0.757083,0.0908083,568,3272,3840
208 | 207,2011-07-26,3,0,7,0,2,1,1,0.771667,0.696979,0.540833,0.200258,750,3840,4590
209 | 208,2011-07-27,3,0,7,0,3,1,1,0.775,0.690667,0.402917,0.183463,755,3901,4656
210 | 209,2011-07-28,3,0,7,0,4,1,1,0.779167,0.7399,0.583333,0.178479,606,3784,4390
211 | 210,2011-07-29,3,0,7,0,5,1,1,0.838333,0.785967,0.5425,0.174138,670,3176,3846
212 | 211,2011-07-30,3,0,7,0,6,0,1,0.804167,0.728537,0.465833,0.168537,1559,2916,4475
213 | 212,2011-07-31,3,0,7,0,0,0,1,0.805833,0.729796,0.480833,0.164813,1524,2778,4302
214 | 213,2011-08-01,3,0,8,0,1,1,1,0.771667,0.703292,0.550833,0.156717,729,3537,4266
215 | 214,2011-08-02,3,0,8,0,2,1,1,0.783333,0.707071,0.49125,0.20585,801,4044,4845
216 | 215,2011-08-03,3,0,8,0,3,1,2,0.731667,0.679937,0.6575,0.135583,467,3107,3574
217 | 216,2011-08-04,3,0,8,0,4,1,2,0.71,0.664788,0.7575,0.19715,799,3777,4576
218 | 217,2011-08-05,3,0,8,0,5,1,1,0.710833,0.656567,0.630833,0.184696,1023,3843,4866
219 | 218,2011-08-06,3,0,8,0,6,0,2,0.716667,0.676154,0.755,0.22825,1521,2773,4294
220 | 219,2011-08-07,3,0,8,0,0,0,1,0.7425,0.715292,0.752917,0.201487,1298,2487,3785
221 | 220,2011-08-08,3,0,8,0,1,1,1,0.765,0.703283,0.592083,0.192175,846,3480,4326
222 | 221,2011-08-09,3,0,8,0,2,1,1,0.775,0.724121,0.570417,0.151121,907,3695,4602
223 | 222,2011-08-10,3,0,8,0,3,1,1,0.766667,0.684983,0.424167,0.200258,884,3896,4780
224 | 223,2011-08-11,3,0,8,0,4,1,1,0.7175,0.651521,0.42375,0.164796,812,3980,4792
225 | 224,2011-08-12,3,0,8,0,5,1,1,0.708333,0.654042,0.415,0.125621,1051,3854,4905
226 | 225,2011-08-13,3,0,8,0,6,0,2,0.685833,0.645858,0.729583,0.211454,1504,2646,4150
227 | 226,2011-08-14,3,0,8,0,0,0,2,0.676667,0.624388,0.8175,0.222633,1338,2482,3820
228 | 227,2011-08-15,3,0,8,0,1,1,1,0.665833,0.616167,0.712083,0.208954,775,3563,4338
229 | 228,2011-08-16,3,0,8,0,2,1,1,0.700833,0.645837,0.578333,0.236329,721,4004,4725
230 | 229,2011-08-17,3,0,8,0,3,1,1,0.723333,0.666671,0.575417,0.143667,668,4026,4694
231 | 230,2011-08-18,3,0,8,0,4,1,1,0.711667,0.662258,0.654583,0.233208,639,3166,3805
232 | 231,2011-08-19,3,0,8,0,5,1,2,0.685,0.633221,0.722917,0.139308,797,3356,4153
233 | 232,2011-08-20,3,0,8,0,6,0,1,0.6975,0.648996,0.674167,0.104467,1914,3277,5191
234 | 233,2011-08-21,3,0,8,0,0,0,1,0.710833,0.675525,0.77,0.248754,1249,2624,3873
235 | 234,2011-08-22,3,0,8,0,1,1,1,0.691667,0.638254,0.47,0.27675,833,3925,4758
236 | 235,2011-08-23,3,0,8,0,2,1,1,0.640833,0.606067,0.455417,0.146763,1281,4614,5895
237 | 236,2011-08-24,3,0,8,0,3,1,1,0.673333,0.630692,0.605,0.253108,949,4181,5130
238 | 237,2011-08-25,3,0,8,0,4,1,2,0.684167,0.645854,0.771667,0.210833,435,3107,3542
239 | 238,2011-08-26,3,0,8,0,5,1,1,0.7,0.659733,0.76125,0.0839625,768,3893,4661
240 | 239,2011-08-27,3,0,8,0,6,0,2,0.68,0.635556,0.85,0.375617,226,889,1115
241 | 240,2011-08-28,3,0,8,0,0,0,1,0.707059,0.647959,0.561765,0.304659,1415,2919,4334
242 | 241,2011-08-29,3,0,8,0,1,1,1,0.636667,0.607958,0.554583,0.159825,729,3905,4634
243 | 242,2011-08-30,3,0,8,0,2,1,1,0.639167,0.594704,0.548333,0.125008,775,4429,5204
244 | 243,2011-08-31,3,0,8,0,3,1,1,0.656667,0.611121,0.597917,0.0833333,688,4370,5058
245 | 244,2011-09-01,3,0,9,0,4,1,1,0.655,0.614921,0.639167,0.141796,783,4332,5115
246 | 245,2011-09-02,3,0,9,0,5,1,2,0.643333,0.604808,0.727083,0.139929,875,3852,4727
247 | 246,2011-09-03,3,0,9,0,6,0,1,0.669167,0.633213,0.716667,0.185325,1935,2549,4484
248 | 247,2011-09-04,3,0,9,0,0,0,1,0.709167,0.665429,0.742083,0.206467,2521,2419,4940
249 | 248,2011-09-05,3,0,9,1,1,0,2,0.673333,0.625646,0.790417,0.212696,1236,2115,3351
250 | 249,2011-09-06,3,0,9,0,2,1,3,0.54,0.5152,0.886957,0.343943,204,2506,2710
251 | 250,2011-09-07,3,0,9,0,3,1,3,0.599167,0.544229,0.917083,0.0970208,118,1878,1996
252 | 251,2011-09-08,3,0,9,0,4,1,3,0.633913,0.555361,0.939565,0.192748,153,1689,1842
253 | 252,2011-09-09,3,0,9,0,5,1,2,0.65,0.578946,0.897917,0.124379,417,3127,3544
254 | 253,2011-09-10,3,0,9,0,6,0,1,0.66,0.607962,0.75375,0.153608,1750,3595,5345
255 | 254,2011-09-11,3,0,9,0,0,0,1,0.653333,0.609229,0.71375,0.115054,1633,3413,5046
256 | 255,2011-09-12,3,0,9,0,1,1,1,0.644348,0.60213,0.692174,0.088913,690,4023,4713
257 | 256,2011-09-13,3,0,9,0,2,1,1,0.650833,0.603554,0.7125,0.141804,701,4062,4763
258 | 257,2011-09-14,3,0,9,0,3,1,1,0.673333,0.6269,0.697083,0.1673,647,4138,4785
259 | 258,2011-09-15,3,0,9,0,4,1,2,0.5775,0.553671,0.709167,0.271146,428,3231,3659
260 | 259,2011-09-16,3,0,9,0,5,1,2,0.469167,0.461475,0.590417,0.164183,742,4018,4760
261 | 260,2011-09-17,3,0,9,0,6,0,2,0.491667,0.478512,0.718333,0.189675,1434,3077,4511
262 | 261,2011-09-18,3,0,9,0,0,0,1,0.5075,0.490537,0.695,0.178483,1353,2921,4274
263 | 262,2011-09-19,3,0,9,0,1,1,2,0.549167,0.529675,0.69,0.151742,691,3848,4539
264 | 263,2011-09-20,3,0,9,0,2,1,2,0.561667,0.532217,0.88125,0.134954,438,3203,3641
265 | 264,2011-09-21,3,0,9,0,3,1,2,0.595,0.550533,0.9,0.0964042,539,3813,4352
266 | 265,2011-09-22,3,0,9,0,4,1,2,0.628333,0.554963,0.902083,0.128125,555,4240,4795
267 | 266,2011-09-23,4,0,9,0,5,1,2,0.609167,0.522125,0.9725,0.0783667,258,2137,2395
268 | 267,2011-09-24,4,0,9,0,6,0,2,0.606667,0.564412,0.8625,0.0783833,1776,3647,5423
269 | 268,2011-09-25,4,0,9,0,0,0,2,0.634167,0.572637,0.845,0.0503792,1544,3466,5010
270 | 269,2011-09-26,4,0,9,0,1,1,2,0.649167,0.589042,0.848333,0.1107,684,3946,4630
271 | 270,2011-09-27,4,0,9,0,2,1,2,0.636667,0.574525,0.885417,0.118171,477,3643,4120
272 | 271,2011-09-28,4,0,9,0,3,1,2,0.635,0.575158,0.84875,0.148629,480,3427,3907
273 | 272,2011-09-29,4,0,9,0,4,1,1,0.616667,0.574512,0.699167,0.172883,653,4186,4839
274 | 273,2011-09-30,4,0,9,0,5,1,1,0.564167,0.544829,0.6475,0.206475,830,4372,5202
275 | 274,2011-10-01,4,0,10,0,6,0,2,0.41,0.412863,0.75375,0.292296,480,1949,2429
276 | 275,2011-10-02,4,0,10,0,0,0,2,0.356667,0.345317,0.791667,0.222013,616,2302,2918
277 | 276,2011-10-03,4,0,10,0,1,1,2,0.384167,0.392046,0.760833,0.0833458,330,3240,3570
278 | 277,2011-10-04,4,0,10,0,2,1,1,0.484167,0.472858,0.71,0.205854,486,3970,4456
279 | 278,2011-10-05,4,0,10,0,3,1,1,0.538333,0.527138,0.647917,0.17725,559,4267,4826
280 | 279,2011-10-06,4,0,10,0,4,1,1,0.494167,0.480425,0.620833,0.134954,639,4126,4765
281 | 280,2011-10-07,4,0,10,0,5,1,1,0.510833,0.504404,0.684167,0.0223917,949,4036,4985
282 | 281,2011-10-08,4,0,10,0,6,0,1,0.521667,0.513242,0.70125,0.0454042,2235,3174,5409
283 | 282,2011-10-09,4,0,10,0,0,0,1,0.540833,0.523983,0.7275,0.06345,2397,3114,5511
284 | 283,2011-10-10,4,0,10,1,1,0,1,0.570833,0.542925,0.73375,0.0423042,1514,3603,5117
285 | 284,2011-10-11,4,0,10,0,2,1,2,0.566667,0.546096,0.80875,0.143042,667,3896,4563
286 | 285,2011-10-12,4,0,10,0,3,1,3,0.543333,0.517717,0.90625,0.24815,217,2199,2416
287 | 286,2011-10-13,4,0,10,0,4,1,2,0.589167,0.551804,0.896667,0.141787,290,2623,2913
288 | 287,2011-10-14,4,0,10,0,5,1,2,0.550833,0.529675,0.71625,0.223883,529,3115,3644
289 | 288,2011-10-15,4,0,10,0,6,0,1,0.506667,0.498725,0.483333,0.258083,1899,3318,5217
290 | 289,2011-10-16,4,0,10,0,0,0,1,0.511667,0.503154,0.486667,0.281717,1748,3293,5041
291 | 290,2011-10-17,4,0,10,0,1,1,1,0.534167,0.510725,0.579583,0.175379,713,3857,4570
292 | 291,2011-10-18,4,0,10,0,2,1,2,0.5325,0.522721,0.701667,0.110087,637,4111,4748
293 | 292,2011-10-19,4,0,10,0,3,1,3,0.541739,0.513848,0.895217,0.243339,254,2170,2424
294 | 293,2011-10-20,4,0,10,0,4,1,1,0.475833,0.466525,0.63625,0.422275,471,3724,4195
295 | 294,2011-10-21,4,0,10,0,5,1,1,0.4275,0.423596,0.574167,0.221396,676,3628,4304
296 | 295,2011-10-22,4,0,10,0,6,0,1,0.4225,0.425492,0.629167,0.0926667,1499,2809,4308
297 | 296,2011-10-23,4,0,10,0,0,0,1,0.421667,0.422333,0.74125,0.0995125,1619,2762,4381
298 | 297,2011-10-24,4,0,10,0,1,1,1,0.463333,0.457067,0.772083,0.118792,699,3488,4187
299 | 298,2011-10-25,4,0,10,0,2,1,1,0.471667,0.463375,0.622917,0.166658,695,3992,4687
300 | 299,2011-10-26,4,0,10,0,3,1,2,0.484167,0.472846,0.720417,0.148642,404,3490,3894
301 | 300,2011-10-27,4,0,10,0,4,1,2,0.47,0.457046,0.812917,0.197763,240,2419,2659
302 | 301,2011-10-28,4,0,10,0,5,1,2,0.330833,0.318812,0.585833,0.229479,456,3291,3747
303 | 302,2011-10-29,4,0,10,0,6,0,3,0.254167,0.227913,0.8825,0.351371,57,570,627
304 | 303,2011-10-30,4,0,10,0,0,0,1,0.319167,0.321329,0.62375,0.176617,885,2446,3331
305 | 304,2011-10-31,4,0,10,0,1,1,1,0.34,0.356063,0.703333,0.10635,362,3307,3669
306 | 305,2011-11-01,4,0,11,0,2,1,1,0.400833,0.397088,0.68375,0.135571,410,3658,4068
307 | 306,2011-11-02,4,0,11,0,3,1,1,0.3775,0.390133,0.71875,0.0820917,370,3816,4186
308 | 307,2011-11-03,4,0,11,0,4,1,1,0.408333,0.405921,0.702083,0.136817,318,3656,3974
309 | 308,2011-11-04,4,0,11,0,5,1,2,0.403333,0.403392,0.6225,0.271779,470,3576,4046
310 | 309,2011-11-05,4,0,11,0,6,0,1,0.326667,0.323854,0.519167,0.189062,1156,2770,3926
311 | 310,2011-11-06,4,0,11,0,0,0,1,0.348333,0.362358,0.734583,0.0920542,952,2697,3649
312 | 311,2011-11-07,4,0,11,0,1,1,1,0.395,0.400871,0.75875,0.057225,373,3662,4035
313 | 312,2011-11-08,4,0,11,0,2,1,1,0.408333,0.412246,0.721667,0.0690375,376,3829,4205
314 | 313,2011-11-09,4,0,11,0,3,1,1,0.4,0.409079,0.758333,0.0621958,305,3804,4109
315 | 314,2011-11-10,4,0,11,0,4,1,2,0.38,0.373721,0.813333,0.189067,190,2743,2933
316 | 315,2011-11-11,4,0,11,1,5,0,1,0.324167,0.306817,0.44625,0.314675,440,2928,3368
317 | 316,2011-11-12,4,0,11,0,6,0,1,0.356667,0.357942,0.552917,0.212062,1275,2792,4067
318 | 317,2011-11-13,4,0,11,0,0,0,1,0.440833,0.43055,0.458333,0.281721,1004,2713,3717
319 | 318,2011-11-14,4,0,11,0,1,1,1,0.53,0.524612,0.587083,0.306596,595,3891,4486
320 | 319,2011-11-15,4,0,11,0,2,1,2,0.53,0.507579,0.68875,0.199633,449,3746,4195
321 | 320,2011-11-16,4,0,11,0,3,1,3,0.456667,0.451988,0.93,0.136829,145,1672,1817
322 | 321,2011-11-17,4,0,11,0,4,1,2,0.341667,0.323221,0.575833,0.305362,139,2914,3053
323 | 322,2011-11-18,4,0,11,0,5,1,1,0.274167,0.272721,0.41,0.168533,245,3147,3392
324 | 323,2011-11-19,4,0,11,0,6,0,1,0.329167,0.324483,0.502083,0.224496,943,2720,3663
325 | 324,2011-11-20,4,0,11,0,0,0,2,0.463333,0.457058,0.684583,0.18595,787,2733,3520
326 | 325,2011-11-21,4,0,11,0,1,1,3,0.4475,0.445062,0.91,0.138054,220,2545,2765
327 | 326,2011-11-22,4,0,11,0,2,1,3,0.416667,0.421696,0.9625,0.118792,69,1538,1607
328 | 327,2011-11-23,4,0,11,0,3,1,2,0.440833,0.430537,0.757917,0.335825,112,2454,2566
329 | 328,2011-11-24,4,0,11,1,4,0,1,0.373333,0.372471,0.549167,0.167304,560,935,1495
330 | 329,2011-11-25,4,0,11,0,5,1,1,0.375,0.380671,0.64375,0.0988958,1095,1697,2792
331 | 330,2011-11-26,4,0,11,0,6,0,1,0.375833,0.385087,0.681667,0.0684208,1249,1819,3068
332 | 331,2011-11-27,4,0,11,0,0,0,1,0.459167,0.4558,0.698333,0.208954,810,2261,3071
333 | 332,2011-11-28,4,0,11,0,1,1,1,0.503478,0.490122,0.743043,0.142122,253,3614,3867
334 | 333,2011-11-29,4,0,11,0,2,1,2,0.458333,0.451375,0.830833,0.258092,96,2818,2914
335 | 334,2011-11-30,4,0,11,0,3,1,1,0.325,0.311221,0.613333,0.271158,188,3425,3613
336 | 335,2011-12-01,4,0,12,0,4,1,1,0.3125,0.305554,0.524583,0.220158,182,3545,3727
337 | 336,2011-12-02,4,0,12,0,5,1,1,0.314167,0.331433,0.625833,0.100754,268,3672,3940
338 | 337,2011-12-03,4,0,12,0,6,0,1,0.299167,0.310604,0.612917,0.0957833,706,2908,3614
339 | 338,2011-12-04,4,0,12,0,0,0,1,0.330833,0.3491,0.775833,0.0839583,634,2851,3485
340 | 339,2011-12-05,4,0,12,0,1,1,2,0.385833,0.393925,0.827083,0.0622083,233,3578,3811
341 | 340,2011-12-06,4,0,12,0,2,1,3,0.4625,0.4564,0.949583,0.232583,126,2468,2594
342 | 341,2011-12-07,4,0,12,0,3,1,3,0.41,0.400246,0.970417,0.266175,50,655,705
343 | 342,2011-12-08,4,0,12,0,4,1,1,0.265833,0.256938,0.58,0.240058,150,3172,3322
344 | 343,2011-12-09,4,0,12,0,5,1,1,0.290833,0.317542,0.695833,0.0827167,261,3359,3620
345 | 344,2011-12-10,4,0,12,0,6,0,1,0.275,0.266412,0.5075,0.233221,502,2688,3190
346 | 345,2011-12-11,4,0,12,0,0,0,1,0.220833,0.253154,0.49,0.0665417,377,2366,2743
347 | 346,2011-12-12,4,0,12,0,1,1,1,0.238333,0.270196,0.670833,0.06345,143,3167,3310
348 | 347,2011-12-13,4,0,12,0,2,1,1,0.2825,0.301138,0.59,0.14055,155,3368,3523
349 | 348,2011-12-14,4,0,12,0,3,1,2,0.3175,0.338362,0.66375,0.0609583,178,3562,3740
350 | 349,2011-12-15,4,0,12,0,4,1,2,0.4225,0.412237,0.634167,0.268042,181,3528,3709
351 | 350,2011-12-16,4,0,12,0,5,1,2,0.375,0.359825,0.500417,0.260575,178,3399,3577
352 | 351,2011-12-17,4,0,12,0,6,0,2,0.258333,0.249371,0.560833,0.243167,275,2464,2739
353 | 352,2011-12-18,4,0,12,0,0,0,1,0.238333,0.245579,0.58625,0.169779,220,2211,2431
354 | 353,2011-12-19,4,0,12,0,1,1,1,0.276667,0.280933,0.6375,0.172896,260,3143,3403
355 | 354,2011-12-20,4,0,12,0,2,1,2,0.385833,0.396454,0.595417,0.0615708,216,3534,3750
356 | 355,2011-12-21,1,0,12,0,3,1,2,0.428333,0.428017,0.858333,0.2214,107,2553,2660
357 | 356,2011-12-22,1,0,12,0,4,1,2,0.423333,0.426121,0.7575,0.047275,227,2841,3068
358 | 357,2011-12-23,1,0,12,0,5,1,1,0.373333,0.377513,0.68625,0.274246,163,2046,2209
359 | 358,2011-12-24,1,0,12,0,6,0,1,0.3025,0.299242,0.5425,0.190304,155,856,1011
360 | 359,2011-12-25,1,0,12,0,0,0,1,0.274783,0.279961,0.681304,0.155091,303,451,754
361 | 360,2011-12-26,1,0,12,1,1,0,1,0.321739,0.315535,0.506957,0.239465,430,887,1317
362 | 361,2011-12-27,1,0,12,0,2,1,2,0.325,0.327633,0.7625,0.18845,103,1059,1162
363 | 362,2011-12-28,1,0,12,0,3,1,1,0.29913,0.279974,0.503913,0.293961,255,2047,2302
364 | 363,2011-12-29,1,0,12,0,4,1,1,0.248333,0.263892,0.574167,0.119412,254,2169,2423
365 | 364,2011-12-30,1,0,12,0,5,1,1,0.311667,0.318812,0.636667,0.134337,491,2508,2999
366 | 365,2011-12-31,1,0,12,0,6,0,1,0.41,0.414121,0.615833,0.220154,665,1820,2485
367 | 366,2012-01-01,1,1,1,0,0,0,1,0.37,0.375621,0.6925,0.192167,686,1608,2294
368 | 367,2012-01-02,1,1,1,1,1,0,1,0.273043,0.252304,0.381304,0.329665,244,1707,1951
369 | 368,2012-01-03,1,1,1,0,2,1,1,0.15,0.126275,0.44125,0.365671,89,2147,2236
370 | 369,2012-01-04,1,1,1,0,3,1,2,0.1075,0.119337,0.414583,0.1847,95,2273,2368
371 | 370,2012-01-05,1,1,1,0,4,1,1,0.265833,0.278412,0.524167,0.129987,140,3132,3272
372 | 371,2012-01-06,1,1,1,0,5,1,1,0.334167,0.340267,0.542083,0.167908,307,3791,4098
373 | 372,2012-01-07,1,1,1,0,6,0,1,0.393333,0.390779,0.531667,0.174758,1070,3451,4521
374 | 373,2012-01-08,1,1,1,0,0,0,1,0.3375,0.340258,0.465,0.191542,599,2826,3425
375 | 374,2012-01-09,1,1,1,0,1,1,2,0.224167,0.247479,0.701667,0.0989,106,2270,2376
376 | 375,2012-01-10,1,1,1,0,2,1,1,0.308696,0.318826,0.646522,0.187552,173,3425,3598
377 | 376,2012-01-11,1,1,1,0,3,1,2,0.274167,0.282821,0.8475,0.131221,92,2085,2177
378 | 377,2012-01-12,1,1,1,0,4,1,2,0.3825,0.381938,0.802917,0.180967,269,3828,4097
379 | 378,2012-01-13,1,1,1,0,5,1,1,0.274167,0.249362,0.5075,0.378108,174,3040,3214
380 | 379,2012-01-14,1,1,1,0,6,0,1,0.18,0.183087,0.4575,0.187183,333,2160,2493
381 | 380,2012-01-15,1,1,1,0,0,0,1,0.166667,0.161625,0.419167,0.251258,284,2027,2311
382 | 381,2012-01-16,1,1,1,1,1,0,1,0.19,0.190663,0.5225,0.231358,217,2081,2298
383 | 382,2012-01-17,1,1,1,0,2,1,2,0.373043,0.364278,0.716087,0.34913,127,2808,2935
384 | 383,2012-01-18,1,1,1,0,3,1,1,0.303333,0.275254,0.443333,0.415429,109,3267,3376
385 | 384,2012-01-19,1,1,1,0,4,1,1,0.19,0.190038,0.4975,0.220158,130,3162,3292
386 | 385,2012-01-20,1,1,1,0,5,1,2,0.2175,0.220958,0.45,0.20275,115,3048,3163
387 | 386,2012-01-21,1,1,1,0,6,0,2,0.173333,0.174875,0.83125,0.222642,67,1234,1301
388 | 387,2012-01-22,1,1,1,0,0,0,2,0.1625,0.16225,0.79625,0.199638,196,1781,1977
389 | 388,2012-01-23,1,1,1,0,1,1,2,0.218333,0.243058,0.91125,0.110708,145,2287,2432
390 | 389,2012-01-24,1,1,1,0,2,1,1,0.3425,0.349108,0.835833,0.123767,439,3900,4339
391 | 390,2012-01-25,1,1,1,0,3,1,1,0.294167,0.294821,0.64375,0.161071,467,3803,4270
392 | 391,2012-01-26,1,1,1,0,4,1,2,0.341667,0.35605,0.769583,0.0733958,244,3831,4075
393 | 392,2012-01-27,1,1,1,0,5,1,2,0.425,0.415383,0.74125,0.342667,269,3187,3456
394 | 393,2012-01-28,1,1,1,0,6,0,1,0.315833,0.326379,0.543333,0.210829,775,3248,4023
395 | 394,2012-01-29,1,1,1,0,0,0,1,0.2825,0.272721,0.31125,0.24005,558,2685,3243
396 | 395,2012-01-30,1,1,1,0,1,1,1,0.269167,0.262625,0.400833,0.215792,126,3498,3624
397 | 396,2012-01-31,1,1,1,0,2,1,1,0.39,0.381317,0.416667,0.261817,324,4185,4509
398 | 397,2012-02-01,1,1,2,0,3,1,1,0.469167,0.466538,0.507917,0.189067,304,4275,4579
399 | 398,2012-02-02,1,1,2,0,4,1,2,0.399167,0.398971,0.672917,0.187187,190,3571,3761
400 | 399,2012-02-03,1,1,2,0,5,1,1,0.313333,0.309346,0.526667,0.178496,310,3841,4151
401 | 400,2012-02-04,1,1,2,0,6,0,2,0.264167,0.272725,0.779583,0.121896,384,2448,2832
402 | 401,2012-02-05,1,1,2,0,0,0,2,0.265833,0.264521,0.687917,0.175996,318,2629,2947
403 | 402,2012-02-06,1,1,2,0,1,1,1,0.282609,0.296426,0.622174,0.1538,206,3578,3784
404 | 403,2012-02-07,1,1,2,0,2,1,1,0.354167,0.361104,0.49625,0.147379,199,4176,4375
405 | 404,2012-02-08,1,1,2,0,3,1,2,0.256667,0.266421,0.722917,0.133721,109,2693,2802
406 | 405,2012-02-09,1,1,2,0,4,1,1,0.265,0.261988,0.562083,0.194037,163,3667,3830
407 | 406,2012-02-10,1,1,2,0,5,1,2,0.280833,0.293558,0.54,0.116929,227,3604,3831
408 | 407,2012-02-11,1,1,2,0,6,0,3,0.224167,0.210867,0.73125,0.289796,192,1977,2169
409 | 408,2012-02-12,1,1,2,0,0,0,1,0.1275,0.101658,0.464583,0.409212,73,1456,1529
410 | 409,2012-02-13,1,1,2,0,1,1,1,0.2225,0.227913,0.41125,0.167283,94,3328,3422
411 | 410,2012-02-14,1,1,2,0,2,1,2,0.319167,0.333946,0.50875,0.141179,135,3787,3922
412 | 411,2012-02-15,1,1,2,0,3,1,1,0.348333,0.351629,0.53125,0.1816,141,4028,4169
413 | 412,2012-02-16,1,1,2,0,4,1,2,0.316667,0.330162,0.752917,0.091425,74,2931,3005
414 | 413,2012-02-17,1,1,2,0,5,1,1,0.343333,0.351629,0.634583,0.205846,349,3805,4154
415 | 414,2012-02-18,1,1,2,0,6,0,1,0.346667,0.355425,0.534583,0.190929,1435,2883,4318
416 | 415,2012-02-19,1,1,2,0,0,0,2,0.28,0.265788,0.515833,0.253112,618,2071,2689
417 | 416,2012-02-20,1,1,2,1,1,0,1,0.28,0.273391,0.507826,0.229083,502,2627,3129
418 | 417,2012-02-21,1,1,2,0,2,1,1,0.287826,0.295113,0.594348,0.205717,163,3614,3777
419 | 418,2012-02-22,1,1,2,0,3,1,1,0.395833,0.392667,0.567917,0.234471,394,4379,4773
420 | 419,2012-02-23,1,1,2,0,4,1,1,0.454167,0.444446,0.554583,0.190913,516,4546,5062
421 | 420,2012-02-24,1,1,2,0,5,1,2,0.4075,0.410971,0.7375,0.237567,246,3241,3487
422 | 421,2012-02-25,1,1,2,0,6,0,1,0.290833,0.255675,0.395833,0.421642,317,2415,2732
423 | 422,2012-02-26,1,1,2,0,0,0,1,0.279167,0.268308,0.41,0.205229,515,2874,3389
424 | 423,2012-02-27,1,1,2,0,1,1,1,0.366667,0.357954,0.490833,0.268033,253,4069,4322
425 | 424,2012-02-28,1,1,2,0,2,1,1,0.359167,0.353525,0.395833,0.193417,229,4134,4363
426 | 425,2012-02-29,1,1,2,0,3,1,2,0.344348,0.34847,0.804783,0.179117,65,1769,1834
427 | 426,2012-03-01,1,1,3,0,4,1,1,0.485833,0.475371,0.615417,0.226987,325,4665,4990
428 | 427,2012-03-02,1,1,3,0,5,1,2,0.353333,0.359842,0.657083,0.144904,246,2948,3194
429 | 428,2012-03-03,1,1,3,0,6,0,2,0.414167,0.413492,0.62125,0.161079,956,3110,4066
430 | 429,2012-03-04,1,1,3,0,0,0,1,0.325833,0.303021,0.403333,0.334571,710,2713,3423
431 | 430,2012-03-05,1,1,3,0,1,1,1,0.243333,0.241171,0.50625,0.228858,203,3130,3333
432 | 431,2012-03-06,1,1,3,0,2,1,1,0.258333,0.255042,0.456667,0.200875,221,3735,3956
433 | 432,2012-03-07,1,1,3,0,3,1,1,0.404167,0.3851,0.513333,0.345779,432,4484,4916
434 | 433,2012-03-08,1,1,3,0,4,1,1,0.5275,0.524604,0.5675,0.441563,486,4896,5382
435 | 434,2012-03-09,1,1,3,0,5,1,2,0.410833,0.397083,0.407083,0.4148,447,4122,4569
436 | 435,2012-03-10,1,1,3,0,6,0,1,0.2875,0.277767,0.350417,0.22575,968,3150,4118
437 | 436,2012-03-11,1,1,3,0,0,0,1,0.361739,0.35967,0.476957,0.222587,1658,3253,4911
438 | 437,2012-03-12,1,1,3,0,1,1,1,0.466667,0.459592,0.489167,0.207713,838,4460,5298
439 | 438,2012-03-13,1,1,3,0,2,1,1,0.565,0.542929,0.6175,0.23695,762,5085,5847
440 | 439,2012-03-14,1,1,3,0,3,1,1,0.5725,0.548617,0.507083,0.115062,997,5315,6312
441 | 440,2012-03-15,1,1,3,0,4,1,1,0.5575,0.532825,0.579583,0.149883,1005,5187,6192
442 | 441,2012-03-16,1,1,3,0,5,1,2,0.435833,0.436229,0.842083,0.113192,548,3830,4378
443 | 442,2012-03-17,1,1,3,0,6,0,2,0.514167,0.505046,0.755833,0.110704,3155,4681,7836
444 | 443,2012-03-18,1,1,3,0,0,0,2,0.4725,0.464,0.81,0.126883,2207,3685,5892
445 | 444,2012-03-19,1,1,3,0,1,1,1,0.545,0.532821,0.72875,0.162317,982,5171,6153
446 | 445,2012-03-20,1,1,3,0,2,1,1,0.560833,0.538533,0.807917,0.121271,1051,5042,6093
447 | 446,2012-03-21,2,1,3,0,3,1,2,0.531667,0.513258,0.82125,0.0895583,1122,5108,6230
448 | 447,2012-03-22,2,1,3,0,4,1,1,0.554167,0.531567,0.83125,0.117562,1334,5537,6871
449 | 448,2012-03-23,2,1,3,0,5,1,2,0.601667,0.570067,0.694167,0.1163,2469,5893,8362
450 | 449,2012-03-24,2,1,3,0,6,0,2,0.5025,0.486733,0.885417,0.192783,1033,2339,3372
451 | 450,2012-03-25,2,1,3,0,0,0,2,0.4375,0.437488,0.880833,0.220775,1532,3464,4996
452 | 451,2012-03-26,2,1,3,0,1,1,1,0.445833,0.43875,0.477917,0.386821,795,4763,5558
453 | 452,2012-03-27,2,1,3,0,2,1,1,0.323333,0.315654,0.29,0.187192,531,4571,5102
454 | 453,2012-03-28,2,1,3,0,3,1,1,0.484167,0.47095,0.48125,0.291671,674,5024,5698
455 | 454,2012-03-29,2,1,3,0,4,1,1,0.494167,0.482304,0.439167,0.31965,834,5299,6133
456 | 455,2012-03-30,2,1,3,0,5,1,2,0.37,0.375621,0.580833,0.138067,796,4663,5459
457 | 456,2012-03-31,2,1,3,0,6,0,2,0.424167,0.421708,0.738333,0.250617,2301,3934,6235
458 | 457,2012-04-01,2,1,4,0,0,0,2,0.425833,0.417287,0.67625,0.172267,2347,3694,6041
459 | 458,2012-04-02,2,1,4,0,1,1,1,0.433913,0.427513,0.504348,0.312139,1208,4728,5936
460 | 459,2012-04-03,2,1,4,0,2,1,1,0.466667,0.461483,0.396667,0.100133,1348,5424,6772
461 | 460,2012-04-04,2,1,4,0,3,1,1,0.541667,0.53345,0.469583,0.180975,1058,5378,6436
462 | 461,2012-04-05,2,1,4,0,4,1,1,0.435,0.431163,0.374167,0.219529,1192,5265,6457
463 | 462,2012-04-06,2,1,4,0,5,1,1,0.403333,0.390767,0.377083,0.300388,1807,4653,6460
464 | 463,2012-04-07,2,1,4,0,6,0,1,0.4375,0.426129,0.254167,0.274871,3252,3605,6857
465 | 464,2012-04-08,2,1,4,0,0,0,1,0.5,0.492425,0.275833,0.232596,2230,2939,5169
466 | 465,2012-04-09,2,1,4,0,1,1,1,0.489167,0.476638,0.3175,0.358196,905,4680,5585
467 | 466,2012-04-10,2,1,4,0,2,1,1,0.446667,0.436233,0.435,0.249375,819,5099,5918
468 | 467,2012-04-11,2,1,4,0,3,1,1,0.348696,0.337274,0.469565,0.295274,482,4380,4862
469 | 468,2012-04-12,2,1,4,0,4,1,1,0.3975,0.387604,0.46625,0.290429,663,4746,5409
470 | 469,2012-04-13,2,1,4,0,5,1,1,0.4425,0.431808,0.408333,0.155471,1252,5146,6398
471 | 470,2012-04-14,2,1,4,0,6,0,1,0.495,0.487996,0.502917,0.190917,2795,4665,7460
472 | 471,2012-04-15,2,1,4,0,0,0,1,0.606667,0.573875,0.507917,0.225129,2846,4286,7132
473 | 472,2012-04-16,2,1,4,1,1,0,1,0.664167,0.614925,0.561667,0.284829,1198,5172,6370
474 | 473,2012-04-17,2,1,4,0,2,1,1,0.608333,0.598487,0.390417,0.273629,989,5702,6691
475 | 474,2012-04-18,2,1,4,0,3,1,2,0.463333,0.457038,0.569167,0.167912,347,4020,4367
476 | 475,2012-04-19,2,1,4,0,4,1,1,0.498333,0.493046,0.6125,0.0659292,846,5719,6565
477 | 476,2012-04-20,2,1,4,0,5,1,1,0.526667,0.515775,0.694583,0.149871,1340,5950,7290
478 | 477,2012-04-21,2,1,4,0,6,0,1,0.57,0.542921,0.682917,0.283587,2541,4083,6624
479 | 478,2012-04-22,2,1,4,0,0,0,3,0.396667,0.389504,0.835417,0.344546,120,907,1027
480 | 479,2012-04-23,2,1,4,0,1,1,2,0.321667,0.301125,0.766667,0.303496,195,3019,3214
481 | 480,2012-04-24,2,1,4,0,2,1,1,0.413333,0.405283,0.454167,0.249383,518,5115,5633
482 | 481,2012-04-25,2,1,4,0,3,1,1,0.476667,0.470317,0.427917,0.118792,655,5541,6196
483 | 482,2012-04-26,2,1,4,0,4,1,2,0.498333,0.483583,0.756667,0.176625,475,4551,5026
484 | 483,2012-04-27,2,1,4,0,5,1,1,0.4575,0.452637,0.400833,0.347633,1014,5219,6233
485 | 484,2012-04-28,2,1,4,0,6,0,2,0.376667,0.377504,0.489583,0.129975,1120,3100,4220
486 | 485,2012-04-29,2,1,4,0,0,0,1,0.458333,0.450121,0.587083,0.116908,2229,4075,6304
487 | 486,2012-04-30,2,1,4,0,1,1,2,0.464167,0.457696,0.57,0.171638,665,4907,5572
488 | 487,2012-05-01,2,1,5,0,2,1,2,0.613333,0.577021,0.659583,0.156096,653,5087,5740
489 | 488,2012-05-02,2,1,5,0,3,1,1,0.564167,0.537896,0.797083,0.138058,667,5502,6169
490 | 489,2012-05-03,2,1,5,0,4,1,2,0.56,0.537242,0.768333,0.133696,764,5657,6421
491 | 490,2012-05-04,2,1,5,0,5,1,1,0.6275,0.590917,0.735417,0.162938,1069,5227,6296
492 | 491,2012-05-05,2,1,5,0,6,0,2,0.621667,0.584608,0.756667,0.152992,2496,4387,6883
493 | 492,2012-05-06,2,1,5,0,0,0,2,0.5625,0.546737,0.74,0.149879,2135,4224,6359
494 | 493,2012-05-07,2,1,5,0,1,1,2,0.5375,0.527142,0.664167,0.230721,1008,5265,6273
495 | 494,2012-05-08,2,1,5,0,2,1,2,0.581667,0.557471,0.685833,0.296029,738,4990,5728
496 | 495,2012-05-09,2,1,5,0,3,1,2,0.575,0.553025,0.744167,0.216412,620,4097,4717
497 | 496,2012-05-10,2,1,5,0,4,1,1,0.505833,0.491783,0.552083,0.314063,1026,5546,6572
498 | 497,2012-05-11,2,1,5,0,5,1,1,0.533333,0.520833,0.360417,0.236937,1319,5711,7030
499 | 498,2012-05-12,2,1,5,0,6,0,1,0.564167,0.544817,0.480417,0.123133,2622,4807,7429
500 | 499,2012-05-13,2,1,5,0,0,0,1,0.6125,0.585238,0.57625,0.225117,2172,3946,6118
501 | 500,2012-05-14,2,1,5,0,1,1,2,0.573333,0.5499,0.789583,0.212692,342,2501,2843
502 | 501,2012-05-15,2,1,5,0,2,1,2,0.611667,0.576404,0.794583,0.147392,625,4490,5115
503 | 502,2012-05-16,2,1,5,0,3,1,1,0.636667,0.595975,0.697917,0.122512,991,6433,7424
504 | 503,2012-05-17,2,1,5,0,4,1,1,0.593333,0.572613,0.52,0.229475,1242,6142,7384
505 | 504,2012-05-18,2,1,5,0,5,1,1,0.564167,0.551121,0.523333,0.136817,1521,6118,7639
506 | 505,2012-05-19,2,1,5,0,6,0,1,0.6,0.566908,0.45625,0.083975,3410,4884,8294
507 | 506,2012-05-20,2,1,5,0,0,0,1,0.620833,0.583967,0.530417,0.254367,2704,4425,7129
508 | 507,2012-05-21,2,1,5,0,1,1,2,0.598333,0.565667,0.81125,0.233204,630,3729,4359
509 | 508,2012-05-22,2,1,5,0,2,1,2,0.615,0.580825,0.765833,0.118167,819,5254,6073
510 | 509,2012-05-23,2,1,5,0,3,1,2,0.621667,0.584612,0.774583,0.102,766,4494,5260
511 | 510,2012-05-24,2,1,5,0,4,1,1,0.655,0.6067,0.716667,0.172896,1059,5711,6770
512 | 511,2012-05-25,2,1,5,0,5,1,1,0.68,0.627529,0.747083,0.14055,1417,5317,6734
513 | 512,2012-05-26,2,1,5,0,6,0,1,0.6925,0.642696,0.7325,0.198992,2855,3681,6536
514 | 513,2012-05-27,2,1,5,0,0,0,1,0.69,0.641425,0.697083,0.215171,3283,3308,6591
515 | 514,2012-05-28,2,1,5,1,1,0,1,0.7125,0.6793,0.67625,0.196521,2557,3486,6043
516 | 515,2012-05-29,2,1,5,0,2,1,1,0.7225,0.672992,0.684583,0.2954,880,4863,5743
517 | 516,2012-05-30,2,1,5,0,3,1,2,0.656667,0.611129,0.67,0.134329,745,6110,6855
518 | 517,2012-05-31,2,1,5,0,4,1,1,0.68,0.631329,0.492917,0.195279,1100,6238,7338
519 | 518,2012-06-01,2,1,6,0,5,1,2,0.654167,0.607962,0.755417,0.237563,533,3594,4127
520 | 519,2012-06-02,2,1,6,0,6,0,1,0.583333,0.566288,0.549167,0.186562,2795,5325,8120
521 | 520,2012-06-03,2,1,6,0,0,0,1,0.6025,0.575133,0.493333,0.184087,2494,5147,7641
522 | 521,2012-06-04,2,1,6,0,1,1,1,0.5975,0.578283,0.487083,0.284833,1071,5927,6998
523 | 522,2012-06-05,2,1,6,0,2,1,2,0.540833,0.525892,0.613333,0.209575,968,6033,7001
524 | 523,2012-06-06,2,1,6,0,3,1,1,0.554167,0.542292,0.61125,0.077125,1027,6028,7055
525 | 524,2012-06-07,2,1,6,0,4,1,1,0.6025,0.569442,0.567083,0.15735,1038,6456,7494
526 | 525,2012-06-08,2,1,6,0,5,1,1,0.649167,0.597862,0.467917,0.175383,1488,6248,7736
527 | 526,2012-06-09,2,1,6,0,6,0,1,0.710833,0.648367,0.437083,0.144287,2708,4790,7498
528 | 527,2012-06-10,2,1,6,0,0,0,1,0.726667,0.663517,0.538333,0.133721,2224,4374,6598
529 | 528,2012-06-11,2,1,6,0,1,1,2,0.720833,0.659721,0.587917,0.207713,1017,5647,6664
530 | 529,2012-06-12,2,1,6,0,2,1,2,0.653333,0.597875,0.833333,0.214546,477,4495,4972
531 | 530,2012-06-13,2,1,6,0,3,1,1,0.655833,0.611117,0.582083,0.343279,1173,6248,7421
532 | 531,2012-06-14,2,1,6,0,4,1,1,0.648333,0.624383,0.569583,0.253733,1180,6183,7363
533 | 532,2012-06-15,2,1,6,0,5,1,1,0.639167,0.599754,0.589583,0.176617,1563,6102,7665
534 | 533,2012-06-16,2,1,6,0,6,0,1,0.631667,0.594708,0.504167,0.166667,2963,4739,7702
535 | 534,2012-06-17,2,1,6,0,0,0,1,0.5925,0.571975,0.59875,0.144904,2634,4344,6978
536 | 535,2012-06-18,2,1,6,0,1,1,2,0.568333,0.544842,0.777917,0.174746,653,4446,5099
537 | 536,2012-06-19,2,1,6,0,2,1,1,0.688333,0.654692,0.69,0.148017,968,5857,6825
538 | 537,2012-06-20,2,1,6,0,3,1,1,0.7825,0.720975,0.592083,0.113812,872,5339,6211
539 | 538,2012-06-21,3,1,6,0,4,1,1,0.805833,0.752542,0.567917,0.118787,778,5127,5905
540 | 539,2012-06-22,3,1,6,0,5,1,1,0.7775,0.724121,0.57375,0.182842,964,4859,5823
541 | 540,2012-06-23,3,1,6,0,6,0,1,0.731667,0.652792,0.534583,0.179721,2657,4801,7458
542 | 541,2012-06-24,3,1,6,0,0,0,1,0.743333,0.674254,0.479167,0.145525,2551,4340,6891
543 | 542,2012-06-25,3,1,6,0,1,1,1,0.715833,0.654042,0.504167,0.300383,1139,5640,6779
544 | 543,2012-06-26,3,1,6,0,2,1,1,0.630833,0.594704,0.373333,0.347642,1077,6365,7442
545 | 544,2012-06-27,3,1,6,0,3,1,1,0.6975,0.640792,0.36,0.271775,1077,6258,7335
546 | 545,2012-06-28,3,1,6,0,4,1,1,0.749167,0.675512,0.4225,0.17165,921,5958,6879
547 | 546,2012-06-29,3,1,6,0,5,1,1,0.834167,0.786613,0.48875,0.165417,829,4634,5463
548 | 547,2012-06-30,3,1,6,0,6,0,1,0.765,0.687508,0.60125,0.161071,1455,4232,5687
549 | 548,2012-07-01,3,1,7,0,0,0,1,0.815833,0.750629,0.51875,0.168529,1421,4110,5531
550 | 549,2012-07-02,3,1,7,0,1,1,1,0.781667,0.702038,0.447083,0.195267,904,5323,6227
551 | 550,2012-07-03,3,1,7,0,2,1,1,0.780833,0.70265,0.492083,0.126237,1052,5608,6660
552 | 551,2012-07-04,3,1,7,1,3,0,1,0.789167,0.732337,0.53875,0.13495,2562,4841,7403
553 | 552,2012-07-05,3,1,7,0,4,1,1,0.8275,0.761367,0.457917,0.194029,1405,4836,6241
554 | 553,2012-07-06,3,1,7,0,5,1,1,0.828333,0.752533,0.450833,0.146142,1366,4841,6207
555 | 554,2012-07-07,3,1,7,0,6,0,1,0.861667,0.804913,0.492083,0.163554,1448,3392,4840
556 | 555,2012-07-08,3,1,7,0,0,0,1,0.8225,0.790396,0.57375,0.125629,1203,3469,4672
557 | 556,2012-07-09,3,1,7,0,1,1,2,0.710833,0.654054,0.683333,0.180975,998,5571,6569
558 | 557,2012-07-10,3,1,7,0,2,1,2,0.720833,0.664796,0.6675,0.151737,954,5336,6290
559 | 558,2012-07-11,3,1,7,0,3,1,1,0.716667,0.650271,0.633333,0.151733,975,6289,7264
560 | 559,2012-07-12,3,1,7,0,4,1,1,0.715833,0.654683,0.529583,0.146775,1032,6414,7446
561 | 560,2012-07-13,3,1,7,0,5,1,2,0.731667,0.667933,0.485833,0.08085,1511,5988,7499
562 | 561,2012-07-14,3,1,7,0,6,0,2,0.703333,0.666042,0.699167,0.143679,2355,4614,6969
563 | 562,2012-07-15,3,1,7,0,0,0,1,0.745833,0.705196,0.717917,0.166667,1920,4111,6031
564 | 563,2012-07-16,3,1,7,0,1,1,1,0.763333,0.724125,0.645,0.164187,1088,5742,6830
565 | 564,2012-07-17,3,1,7,0,2,1,1,0.818333,0.755683,0.505833,0.114429,921,5865,6786
566 | 565,2012-07-18,3,1,7,0,3,1,1,0.793333,0.745583,0.577083,0.137442,799,4914,5713
567 | 566,2012-07-19,3,1,7,0,4,1,1,0.77,0.714642,0.600417,0.165429,888,5703,6591
568 | 567,2012-07-20,3,1,7,0,5,1,2,0.665833,0.613025,0.844167,0.208967,747,5123,5870
569 | 568,2012-07-21,3,1,7,0,6,0,3,0.595833,0.549912,0.865417,0.2133,1264,3195,4459
570 | 569,2012-07-22,3,1,7,0,0,0,2,0.6675,0.623125,0.7625,0.0939208,2544,4866,7410
571 | 570,2012-07-23,3,1,7,0,1,1,1,0.741667,0.690017,0.694167,0.138683,1135,5831,6966
572 | 571,2012-07-24,3,1,7,0,2,1,1,0.750833,0.70645,0.655,0.211454,1140,6452,7592
573 | 572,2012-07-25,3,1,7,0,3,1,1,0.724167,0.654054,0.45,0.1648,1383,6790,8173
574 | 573,2012-07-26,3,1,7,0,4,1,1,0.776667,0.739263,0.596667,0.284813,1036,5825,6861
575 | 574,2012-07-27,3,1,7,0,5,1,1,0.781667,0.734217,0.594583,0.152992,1259,5645,6904
576 | 575,2012-07-28,3,1,7,0,6,0,1,0.755833,0.697604,0.613333,0.15735,2234,4451,6685
577 | 576,2012-07-29,3,1,7,0,0,0,1,0.721667,0.667933,0.62375,0.170396,2153,4444,6597
578 | 577,2012-07-30,3,1,7,0,1,1,1,0.730833,0.684987,0.66875,0.153617,1040,6065,7105
579 | 578,2012-07-31,3,1,7,0,2,1,1,0.713333,0.662896,0.704167,0.165425,968,6248,7216
580 | 579,2012-08-01,3,1,8,0,3,1,1,0.7175,0.667308,0.6775,0.141179,1074,6506,7580
581 | 580,2012-08-02,3,1,8,0,4,1,1,0.7525,0.707088,0.659583,0.129354,983,6278,7261
582 | 581,2012-08-03,3,1,8,0,5,1,2,0.765833,0.722867,0.6425,0.215792,1328,5847,7175
583 | 582,2012-08-04,3,1,8,0,6,0,1,0.793333,0.751267,0.613333,0.257458,2345,4479,6824
584 | 583,2012-08-05,3,1,8,0,0,0,1,0.769167,0.731079,0.6525,0.290421,1707,3757,5464
585 | 584,2012-08-06,3,1,8,0,1,1,2,0.7525,0.710246,0.654167,0.129354,1233,5780,7013
586 | 585,2012-08-07,3,1,8,0,2,1,2,0.735833,0.697621,0.70375,0.116908,1278,5995,7273
587 | 586,2012-08-08,3,1,8,0,3,1,2,0.75,0.707717,0.672917,0.1107,1263,6271,7534
588 | 587,2012-08-09,3,1,8,0,4,1,1,0.755833,0.699508,0.620417,0.1561,1196,6090,7286
589 | 588,2012-08-10,3,1,8,0,5,1,2,0.715833,0.667942,0.715833,0.238813,1065,4721,5786
590 | 589,2012-08-11,3,1,8,0,6,0,2,0.6925,0.638267,0.732917,0.206479,2247,4052,6299
591 | 590,2012-08-12,3,1,8,0,0,0,1,0.700833,0.644579,0.530417,0.122512,2182,4362,6544
592 | 591,2012-08-13,3,1,8,0,1,1,1,0.720833,0.662254,0.545417,0.136212,1207,5676,6883
593 | 592,2012-08-14,3,1,8,0,2,1,1,0.726667,0.676779,0.686667,0.169158,1128,5656,6784
594 | 593,2012-08-15,3,1,8,0,3,1,1,0.706667,0.654037,0.619583,0.169771,1198,6149,7347
595 | 594,2012-08-16,3,1,8,0,4,1,1,0.719167,0.654688,0.519167,0.141796,1338,6267,7605
596 | 595,2012-08-17,3,1,8,0,5,1,1,0.723333,0.2424,0.570833,0.231354,1483,5665,7148
597 | 596,2012-08-18,3,1,8,0,6,0,1,0.678333,0.618071,0.603333,0.177867,2827,5038,7865
598 | 597,2012-08-19,3,1,8,0,0,0,2,0.635833,0.603554,0.711667,0.08645,1208,3341,4549
599 | 598,2012-08-20,3,1,8,0,1,1,2,0.635833,0.595967,0.734167,0.129979,1026,5504,6530
600 | 599,2012-08-21,3,1,8,0,2,1,1,0.649167,0.601025,0.67375,0.0727708,1081,5925,7006
601 | 600,2012-08-22,3,1,8,0,3,1,1,0.6675,0.621854,0.677083,0.0702833,1094,6281,7375
602 | 601,2012-08-23,3,1,8,0,4,1,1,0.695833,0.637008,0.635833,0.0845958,1363,6402,7765
603 | 602,2012-08-24,3,1,8,0,5,1,2,0.7025,0.6471,0.615,0.0721458,1325,6257,7582
604 | 603,2012-08-25,3,1,8,0,6,0,2,0.661667,0.618696,0.712917,0.244408,1829,4224,6053
605 | 604,2012-08-26,3,1,8,0,0,0,2,0.653333,0.595996,0.845833,0.228858,1483,3772,5255
606 | 605,2012-08-27,3,1,8,0,1,1,1,0.703333,0.654688,0.730417,0.128733,989,5928,6917
607 | 606,2012-08-28,3,1,8,0,2,1,1,0.728333,0.66605,0.62,0.190925,935,6105,7040
608 | 607,2012-08-29,3,1,8,0,3,1,1,0.685,0.635733,0.552083,0.112562,1177,6520,7697
609 | 608,2012-08-30,3,1,8,0,4,1,1,0.706667,0.652779,0.590417,0.0771167,1172,6541,7713
610 | 609,2012-08-31,3,1,8,0,5,1,1,0.764167,0.6894,0.5875,0.168533,1433,5917,7350
611 | 610,2012-09-01,3,1,9,0,6,0,2,0.753333,0.702654,0.638333,0.113187,2352,3788,6140
612 | 611,2012-09-02,3,1,9,0,0,0,2,0.696667,0.649,0.815,0.0640708,2613,3197,5810
613 | 612,2012-09-03,3,1,9,1,1,0,1,0.7075,0.661629,0.790833,0.151121,1965,4069,6034
614 | 613,2012-09-04,3,1,9,0,2,1,1,0.725833,0.686888,0.755,0.236321,867,5997,6864
615 | 614,2012-09-05,3,1,9,0,3,1,1,0.736667,0.708983,0.74125,0.187808,832,6280,7112
616 | 615,2012-09-06,3,1,9,0,4,1,2,0.696667,0.655329,0.810417,0.142421,611,5592,6203
617 | 616,2012-09-07,3,1,9,0,5,1,1,0.703333,0.657204,0.73625,0.171646,1045,6459,7504
618 | 617,2012-09-08,3,1,9,0,6,0,2,0.659167,0.611121,0.799167,0.281104,1557,4419,5976
619 | 618,2012-09-09,3,1,9,0,0,0,1,0.61,0.578925,0.5475,0.224496,2570,5657,8227
620 | 619,2012-09-10,3,1,9,0,1,1,1,0.583333,0.565654,0.50375,0.258713,1118,6407,7525
621 | 620,2012-09-11,3,1,9,0,2,1,1,0.5775,0.554292,0.52,0.0920542,1070,6697,7767
622 | 621,2012-09-12,3,1,9,0,3,1,1,0.599167,0.570075,0.577083,0.131846,1050,6820,7870
623 | 622,2012-09-13,3,1,9,0,4,1,1,0.6125,0.579558,0.637083,0.0827208,1054,6750,7804
624 | 623,2012-09-14,3,1,9,0,5,1,1,0.633333,0.594083,0.6725,0.103863,1379,6630,8009
625 | 624,2012-09-15,3,1,9,0,6,0,1,0.608333,0.585867,0.501667,0.247521,3160,5554,8714
626 | 625,2012-09-16,3,1,9,0,0,0,1,0.58,0.563125,0.57,0.0901833,2166,5167,7333
627 | 626,2012-09-17,3,1,9,0,1,1,2,0.580833,0.55305,0.734583,0.151742,1022,5847,6869
628 | 627,2012-09-18,3,1,9,0,2,1,2,0.623333,0.565067,0.8725,0.357587,371,3702,4073
629 | 628,2012-09-19,3,1,9,0,3,1,1,0.5525,0.540404,0.536667,0.215175,788,6803,7591
630 | 629,2012-09-20,3,1,9,0,4,1,1,0.546667,0.532192,0.618333,0.118167,939,6781,7720
631 | 630,2012-09-21,3,1,9,0,5,1,1,0.599167,0.571971,0.66875,0.154229,1250,6917,8167
632 | 631,2012-09-22,3,1,9,0,6,0,1,0.65,0.610488,0.646667,0.283583,2512,5883,8395
633 | 632,2012-09-23,4,1,9,0,0,0,1,0.529167,0.518933,0.467083,0.223258,2454,5453,7907
634 | 633,2012-09-24,4,1,9,0,1,1,1,0.514167,0.502513,0.492917,0.142404,1001,6435,7436
635 | 634,2012-09-25,4,1,9,0,2,1,1,0.55,0.544179,0.57,0.236321,845,6693,7538
636 | 635,2012-09-26,4,1,9,0,3,1,1,0.635,0.596613,0.630833,0.2444,787,6946,7733
637 | 636,2012-09-27,4,1,9,0,4,1,2,0.65,0.607975,0.690833,0.134342,751,6642,7393
638 | 637,2012-09-28,4,1,9,0,5,1,2,0.619167,0.585863,0.69,0.164179,1045,6370,7415
639 | 638,2012-09-29,4,1,9,0,6,0,1,0.5425,0.530296,0.542917,0.227604,2589,5966,8555
640 | 639,2012-09-30,4,1,9,0,0,0,1,0.526667,0.517663,0.583333,0.134958,2015,4874,6889
641 | 640,2012-10-01,4,1,10,0,1,1,2,0.520833,0.512,0.649167,0.0908042,763,6015,6778
642 | 641,2012-10-02,4,1,10,0,2,1,3,0.590833,0.542333,0.871667,0.104475,315,4324,4639
643 | 642,2012-10-03,4,1,10,0,3,1,2,0.6575,0.599133,0.79375,0.0665458,728,6844,7572
644 | 643,2012-10-04,4,1,10,0,4,1,2,0.6575,0.607975,0.722917,0.117546,891,6437,7328
645 | 644,2012-10-05,4,1,10,0,5,1,1,0.615,0.580187,0.6275,0.10635,1516,6640,8156
646 | 645,2012-10-06,4,1,10,0,6,0,1,0.554167,0.538521,0.664167,0.268025,3031,4934,7965
647 | 646,2012-10-07,4,1,10,0,0,0,2,0.415833,0.419813,0.708333,0.141162,781,2729,3510
648 | 647,2012-10-08,4,1,10,1,1,0,2,0.383333,0.387608,0.709583,0.189679,874,4604,5478
649 | 648,2012-10-09,4,1,10,0,2,1,2,0.446667,0.438112,0.761667,0.1903,601,5791,6392
650 | 649,2012-10-10,4,1,10,0,3,1,1,0.514167,0.503142,0.630833,0.187821,780,6911,7691
651 | 650,2012-10-11,4,1,10,0,4,1,1,0.435,0.431167,0.463333,0.181596,834,6736,7570
652 | 651,2012-10-12,4,1,10,0,5,1,1,0.4375,0.433071,0.539167,0.235092,1060,6222,7282
653 | 652,2012-10-13,4,1,10,0,6,0,1,0.393333,0.391396,0.494583,0.146142,2252,4857,7109
654 | 653,2012-10-14,4,1,10,0,0,0,1,0.521667,0.508204,0.640417,0.278612,2080,4559,6639
655 | 654,2012-10-15,4,1,10,0,1,1,2,0.561667,0.53915,0.7075,0.296037,760,5115,5875
656 | 655,2012-10-16,4,1,10,0,2,1,1,0.468333,0.460846,0.558333,0.182221,922,6612,7534
657 | 656,2012-10-17,4,1,10,0,3,1,1,0.455833,0.450108,0.692917,0.101371,979,6482,7461
658 | 657,2012-10-18,4,1,10,0,4,1,2,0.5225,0.512625,0.728333,0.236937,1008,6501,7509
659 | 658,2012-10-19,4,1,10,0,5,1,2,0.563333,0.537896,0.815,0.134954,753,4671,5424
660 | 659,2012-10-20,4,1,10,0,6,0,1,0.484167,0.472842,0.572917,0.117537,2806,5284,8090
661 | 660,2012-10-21,4,1,10,0,0,0,1,0.464167,0.456429,0.51,0.166054,2132,4692,6824
662 | 661,2012-10-22,4,1,10,0,1,1,1,0.4875,0.482942,0.568333,0.0814833,830,6228,7058
663 | 662,2012-10-23,4,1,10,0,2,1,1,0.544167,0.530304,0.641667,0.0945458,841,6625,7466
664 | 663,2012-10-24,4,1,10,0,3,1,1,0.5875,0.558721,0.63625,0.0727792,795,6898,7693
665 | 664,2012-10-25,4,1,10,0,4,1,2,0.55,0.529688,0.800417,0.124375,875,6484,7359
666 | 665,2012-10-26,4,1,10,0,5,1,2,0.545833,0.52275,0.807083,0.132467,1182,6262,7444
667 | 666,2012-10-27,4,1,10,0,6,0,2,0.53,0.515133,0.72,0.235692,2643,5209,7852
668 | 667,2012-10-28,4,1,10,0,0,0,2,0.4775,0.467771,0.694583,0.398008,998,3461,4459
669 | 668,2012-10-29,4,1,10,0,1,1,3,0.44,0.4394,0.88,0.3582,2,20,22
670 | 669,2012-10-30,4,1,10,0,2,1,2,0.318182,0.309909,0.825455,0.213009,87,1009,1096
671 | 670,2012-10-31,4,1,10,0,3,1,2,0.3575,0.3611,0.666667,0.166667,419,5147,5566
672 | 671,2012-11-01,4,1,11,0,4,1,2,0.365833,0.369942,0.581667,0.157346,466,5520,5986
673 | 672,2012-11-02,4,1,11,0,5,1,1,0.355,0.356042,0.522083,0.266175,618,5229,5847
674 | 673,2012-11-03,4,1,11,0,6,0,2,0.343333,0.323846,0.49125,0.270529,1029,4109,5138
675 | 674,2012-11-04,4,1,11,0,0,0,1,0.325833,0.329538,0.532917,0.179108,1201,3906,5107
676 | 675,2012-11-05,4,1,11,0,1,1,1,0.319167,0.308075,0.494167,0.236325,378,4881,5259
677 | 676,2012-11-06,4,1,11,0,2,1,1,0.280833,0.281567,0.567083,0.173513,466,5220,5686
678 | 677,2012-11-07,4,1,11,0,3,1,2,0.295833,0.274621,0.5475,0.304108,326,4709,5035
679 | 678,2012-11-08,4,1,11,0,4,1,1,0.352174,0.341891,0.333478,0.347835,340,4975,5315
680 | 679,2012-11-09,4,1,11,0,5,1,1,0.361667,0.355413,0.540833,0.214558,709,5283,5992
681 | 680,2012-11-10,4,1,11,0,6,0,1,0.389167,0.393937,0.645417,0.0578458,2090,4446,6536
682 | 681,2012-11-11,4,1,11,0,0,0,1,0.420833,0.421713,0.659167,0.1275,2290,4562,6852
683 | 682,2012-11-12,4,1,11,1,1,0,1,0.485,0.475383,0.741667,0.173517,1097,5172,6269
684 | 683,2012-11-13,4,1,11,0,2,1,2,0.343333,0.323225,0.662917,0.342046,327,3767,4094
685 | 684,2012-11-14,4,1,11,0,3,1,1,0.289167,0.281563,0.552083,0.199625,373,5122,5495
686 | 685,2012-11-15,4,1,11,0,4,1,2,0.321667,0.324492,0.620417,0.152987,320,5125,5445
687 | 686,2012-11-16,4,1,11,0,5,1,1,0.345,0.347204,0.524583,0.171025,484,5214,5698
688 | 687,2012-11-17,4,1,11,0,6,0,1,0.325,0.326383,0.545417,0.179729,1313,4316,5629
689 | 688,2012-11-18,4,1,11,0,0,0,1,0.3425,0.337746,0.692917,0.227612,922,3747,4669
690 | 689,2012-11-19,4,1,11,0,1,1,2,0.380833,0.375621,0.623333,0.235067,449,5050,5499
691 | 690,2012-11-20,4,1,11,0,2,1,2,0.374167,0.380667,0.685,0.082725,534,5100,5634
692 | 691,2012-11-21,4,1,11,0,3,1,1,0.353333,0.364892,0.61375,0.103246,615,4531,5146
693 | 692,2012-11-22,4,1,11,1,4,0,1,0.34,0.350371,0.580417,0.0528708,955,1470,2425
694 | 693,2012-11-23,4,1,11,0,5,1,1,0.368333,0.378779,0.56875,0.148021,1603,2307,3910
695 | 694,2012-11-24,4,1,11,0,6,0,1,0.278333,0.248742,0.404583,0.376871,532,1745,2277
696 | 695,2012-11-25,4,1,11,0,0,0,1,0.245833,0.257583,0.468333,0.1505,309,2115,2424
697 | 696,2012-11-26,4,1,11,0,1,1,1,0.313333,0.339004,0.535417,0.04665,337,4750,5087
698 | 697,2012-11-27,4,1,11,0,2,1,2,0.291667,0.281558,0.786667,0.237562,123,3836,3959
699 | 698,2012-11-28,4,1,11,0,3,1,1,0.296667,0.289762,0.50625,0.210821,198,5062,5260
700 | 699,2012-11-29,4,1,11,0,4,1,1,0.28087,0.298422,0.555652,0.115522,243,5080,5323
701 | 700,2012-11-30,4,1,11,0,5,1,1,0.298333,0.323867,0.649583,0.0584708,362,5306,5668
702 | 701,2012-12-01,4,1,12,0,6,0,2,0.298333,0.316904,0.806667,0.0597042,951,4240,5191
703 | 702,2012-12-02,4,1,12,0,0,0,2,0.3475,0.359208,0.823333,0.124379,892,3757,4649
704 | 703,2012-12-03,4,1,12,0,1,1,1,0.4525,0.455796,0.7675,0.0827208,555,5679,6234
705 | 704,2012-12-04,4,1,12,0,2,1,1,0.475833,0.469054,0.73375,0.174129,551,6055,6606
706 | 705,2012-12-05,4,1,12,0,3,1,1,0.438333,0.428012,0.485,0.324021,331,5398,5729
707 | 706,2012-12-06,4,1,12,0,4,1,1,0.255833,0.258204,0.50875,0.174754,340,5035,5375
708 | 707,2012-12-07,4,1,12,0,5,1,2,0.320833,0.321958,0.764167,0.1306,349,4659,5008
709 | 708,2012-12-08,4,1,12,0,6,0,2,0.381667,0.389508,0.91125,0.101379,1153,4429,5582
710 | 709,2012-12-09,4,1,12,0,0,0,2,0.384167,0.390146,0.905417,0.157975,441,2787,3228
711 | 710,2012-12-10,4,1,12,0,1,1,2,0.435833,0.435575,0.925,0.190308,329,4841,5170
712 | 711,2012-12-11,4,1,12,0,2,1,2,0.353333,0.338363,0.596667,0.296037,282,5219,5501
713 | 712,2012-12-12,4,1,12,0,3,1,2,0.2975,0.297338,0.538333,0.162937,310,5009,5319
714 | 713,2012-12-13,4,1,12,0,4,1,1,0.295833,0.294188,0.485833,0.174129,425,5107,5532
715 | 714,2012-12-14,4,1,12,0,5,1,1,0.281667,0.294192,0.642917,0.131229,429,5182,5611
716 | 715,2012-12-15,4,1,12,0,6,0,1,0.324167,0.338383,0.650417,0.10635,767,4280,5047
717 | 716,2012-12-16,4,1,12,0,0,0,2,0.3625,0.369938,0.83875,0.100742,538,3248,3786
718 | 717,2012-12-17,4,1,12,0,1,1,2,0.393333,0.4015,0.907083,0.0982583,212,4373,4585
719 | 718,2012-12-18,4,1,12,0,2,1,1,0.410833,0.409708,0.66625,0.221404,433,5124,5557
720 | 719,2012-12-19,4,1,12,0,3,1,1,0.3325,0.342162,0.625417,0.184092,333,4934,5267
721 | 720,2012-12-20,4,1,12,0,4,1,2,0.33,0.335217,0.667917,0.132463,314,3814,4128
722 | 721,2012-12-21,1,1,12,0,5,1,2,0.326667,0.301767,0.556667,0.374383,221,3402,3623
723 | 722,2012-12-22,1,1,12,0,6,0,1,0.265833,0.236113,0.44125,0.407346,205,1544,1749
724 | 723,2012-12-23,1,1,12,0,0,0,1,0.245833,0.259471,0.515417,0.133083,408,1379,1787
725 | 724,2012-12-24,1,1,12,0,1,1,2,0.231304,0.2589,0.791304,0.0772304,174,746,920
726 | 725,2012-12-25,1,1,12,1,2,0,2,0.291304,0.294465,0.734783,0.168726,440,573,1013
727 | 726,2012-12-26,1,1,12,0,3,1,3,0.243333,0.220333,0.823333,0.316546,9,432,441
728 | 727,2012-12-27,1,1,12,0,4,1,2,0.254167,0.226642,0.652917,0.350133,247,1867,2114
729 | 728,2012-12-28,1,1,12,0,5,1,2,0.253333,0.255046,0.59,0.155471,644,2451,3095
730 | 729,2012-12-29,1,1,12,0,6,0,2,0.253333,0.2424,0.752917,0.124383,159,1182,1341
731 | 730,2012-12-30,1,1,12,0,0,0,1,0.255833,0.2317,0.483333,0.350754,364,1432,1796
732 | 731,2012-12-31,1,1,12,0,1,1,2,0.215833,0.223487,0.5775,0.154846,439,2290,2729
733 |
--------------------------------------------------------------------------------