├── .gitignore
├── README.md
├── week1
│   └── quiz1.R
├── week3
│   └── week3.R
├── week4
│   ├── week4.R
│   └── data
│       └── gaData.csv
└── week2
    ├── quiz2.R
    └── week2.R
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .Rhistory
3 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Coursera-Practical-Machine-Learning
2 | ===================================
3 | 
4 | Programming assignments for the Coursera.com online course Practical Machine Learning.
5 | 
6 | The repository for the course project of Practical Machine Learning is [here](https://github.com/Xiaodan/Coursera-PML-Quantified-Self-Project/tree/gh-pages).
7 | 
8 | This is a four-week course, running Jan 5 - Feb 2, 2015.
9 | 
10 | © 2015 Xiaodan Zhang. All rights reserved.
11 | 
12 | 
--------------------------------------------------------------------------------
/week1/quiz1.R:
--------------------------------------------------------------------------------
1 | ## Quiz 1
2 | # Problem 1.
3 | # Which of the following are steps in building a machine learning algorithm?
4 | # Deciding on an algorithm, creating features, evaluating the prediction.
5 | 
6 | # Problem 2.
7 | # Suppose we build a prediction algorithm on a data set and it is 100% accurate on that data set.
8 | # Why might the algorithm not work well if we collect a new data set?
9 | # Our algorithm may be overfitting the training data,
10 | # predicting both the signal and the noise.
11 | 
12 | # Problem 3.
13 | # Typical sizes for the training and test sets:
14 | # 60% in the training set, 40% in the testing set.
15 | 
16 | # Problem 4.
17 | # What are some common error rates for predicting binary variables (i.e. variables with two possible
18 | # values like yes/no, disease/normal, clicked/didn't click)?
19 | # Specificity, Sensitivity
20 | 
21 | # Problem 5.
22 | # Suppose that we have created a machine learning algorithm that predicts whether a link will be
23 | # clicked with 99% sensitivity and 99% specificity. The rate the link is clicked is 1/1000 of
24 | # visits to a website. If we predict the link will be clicked on a specific visit,
25 | # what is the probability it will actually be clicked?
26 | # 100,000 visits => 100 true clicks
27 | # 99% = sensitivity = TP/(TP+FN) = 99/(99+1) = 99/100
28 | # 99% = specificity = TN/(TN+FP) = 98901/(98901+999) = 98901/99900
29 | # P(actually clicked | predicted clicked) = TP/(TP+FP) = 99/(99+999) ≈ 9%
--------------------------------------------------------------------------------
/week3/week3.R:
--------------------------------------------------------------------------------
1 | ## Quiz 3
2 | # Problem 1.
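# (Added sketch) One way to sanity-check the quiz cases mechanically once
# cartModel has been fit below: copy a template row and overwrite the two
# variables the fitted tree actually splits on (TotalIntenCh2 and
# FiberWidthCh1). Every other column keeps the template's values, so this
# only illustrates the predict() mechanics, not a full answer:
#   caseA <- trainData[1, ]
#   caseA$TotalIntenCh2 <- 23000
#   caseA$FiberWidthCh1 <- 10
#   predict(cartModel, caseA) # expected PS: 23000 < 45323.5 at the root split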
3 | library(AppliedPredictiveModeling)
4 | data(segmentationOriginal)
5 | library(caret)
6 | data <- segmentationOriginal
7 | set.seed(125)
8 | inTrain <- data$Case == "Train"
9 | trainData <- data[inTrain, ]
10 | testData <- data[!inTrain, ]
11 | cartModel <- train(Class ~ ., data=trainData, method="rpart")
12 | cartModel$finalModel
13 | # n= 1009
14 | # node), split, n, loss, yval, (yprob)
15 | #       * denotes terminal node
16 | #  1) root 1009 373 PS (0.63032706 0.36967294)
17 | #    2) TotalIntenCh2< 45323.5 454  34 PS (0.92511013 0.07488987) *
18 | #    3) TotalIntenCh2>=45323.5 555 216 WS (0.38918919 0.61081081)
19 | #      6) FiberWidthCh1< 9.673245 154  47 PS (0.69480519 0.30519481) *
20 | #      7) FiberWidthCh1>=9.673245 401 109 WS (0.27182045 0.72817955) *
21 | plot(cartModel$finalModel, uniform=T)
22 | text(cartModel$finalModel, cex=0.8)
23 | # a. TotalIntenCh2 = 23,000; FiberWidthCh1 = 10; PerimStatusCh1 = 2 => PS
24 | # b. TotalIntenCh2 = 50,000; FiberWidthCh1 = 10; VarIntenCh4 = 100 => WS
25 | # c. TotalIntenCh2 = 57,000; FiberWidthCh1 = 8; VarIntenCh4 = 100 => PS
26 | # d. FiberWidthCh1 = 8; VarIntenCh4 = 100; PerimStatusCh1 = 2 => Not possible to predict
27 | 
28 | # Problem 2.
29 | # Q: If K is small in K-fold cross-validation, is the bias in the
30 | # estimate of out-of-sample (test set) accuracy smaller or bigger?
31 | # A: The bias is larger.
32 | # Q: If K is small, is the variance in the estimate of out-of-sample
33 | # (test set) accuracy smaller or bigger?
34 | # A: The variance is smaller.
35 | # Q: Is K large or small in leave-one-out cross-validation?
36 | # A: Under leave-one-out cross-validation, K is equal to the sample
37 | # size.
38 | 
39 | # Problem 3.
40 | library(pgmm)
41 | data(olive)
42 | dim(olive)
43 | head(olive)
44 | olive <- olive[,-1]
45 | treeModel <- train(Area ~ ., data=olive, method="rpart2")
46 | treeModel
47 | newdata <- as.data.frame(t(colMeans(olive)))
48 | predict(treeModel, newdata) # 2.875
49 | # 2.875. It is strange because Area should be a qualitative
50 | # variable, but the tree is reporting the average value of Area as
51 | # a numeric variable in the leaf predicted for newdata.
52 | 
53 | # Problem 4.
54 | library(ElemStatLearn)
55 | data(SAheart)
56 | set.seed(8484)
57 | train <- sample(1:dim(SAheart)[1], size=dim(SAheart)[1]/2, replace=F)
58 | trainSA <- SAheart[train,]
59 | testSA <- SAheart[-train,]
60 | set.seed(13234)
61 | logitModel <- train(chd ~ age + alcohol + obesity + tobacco +
62 |                     typea + ldl, data=trainSA, method="glm",
63 |                     family="binomial")
64 | logitModel
65 | missClass <- function(values, prediction){sum(((prediction > 0.5)*1) != values)/length(values)}
66 | predictTrain <- predict(logitModel, trainSA)
67 | predictTest <- predict(logitModel, testSA)
68 | # Training set misclassification rate
69 | missClass(trainSA$chd, predictTrain) # 0.2727273
70 | # Test set misclassification rate
71 | missClass(testSA$chd, predictTest) # 0.3116883
72 | 
73 | # Problem 5.
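# (Added check) caret::varImp() on a randomForest object wraps
# randomForest::importance(), so the ordering can be confirmed directly once
# modelRf has been fit below; a minimal sketch:
#   imp <- randomForest::importance(modelRf)
#   rownames(imp)[order(imp[, 1], decreasing=TRUE)] # x.2, x.1, x.5, ...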
74 | library(ElemStatLearn)
75 | data(vowel.train)
76 | data(vowel.test)
77 | head(vowel.train)
78 | head(vowel.test)
79 | dim(vowel.train) # 528 11
80 | dim(vowel.test) # 462 11
81 | vowel.train$y <- as.factor(vowel.train$y)
82 | vowel.test$y <- as.factor(vowel.test$y)
83 | library(randomForest) # randomForest() below needs this package
84 | set.seed(33833)
85 | modelRf <- randomForest(y ~ ., data = vowel.train, importance = FALSE)
86 | order(varImp(modelRf), decreasing=T)
87 | # The order of the variables is:
88 | # x.2, x.1, x.5, x.6, x.8, x.4, x.9, x.3, x.7, x.10
89 | 
90 | 
--------------------------------------------------------------------------------
/week4/week4.R:
--------------------------------------------------------------------------------
1 | ## Quiz 4.
2 | # Problem 1.
3 | library(ElemStatLearn)
4 | library(caret)
5 | data(vowel.train)
6 | data(vowel.test)
7 | vowel.train$y <- as.factor(vowel.train$y)
8 | vowel.test$y <- as.factor(vowel.test$y)
9 | set.seed(33833)
10 | # Fit rf and gbm predictors relating the factor variable y to the other variables
11 | fitRf <- train(y ~ ., data=vowel.train, method="rf")
12 | fitGBM <- train(y ~ ., data=vowel.train, method="gbm")
13 | predRf <- predict(fitRf, vowel.test)
14 | predGBM <- predict(fitGBM, vowel.test)
15 | # RF Accuracy: 0.6060606
16 | confusionMatrix(predRf, vowel.test$y)$overall[1]
17 | # GBM Accuracy: 0.530303
18 | confusionMatrix(predGBM, vowel.test$y)$overall[1]
19 | pred <- data.frame(predRf, predGBM, y=vowel.test$y, agree=predRf == predGBM)
20 | head(pred)
21 | accuracy <- sum(predRf[pred$agree] == pred$y[pred$agree]) / sum(pred$agree)
22 | accuracy # Agreement Accuracy: 0.6569579
23 | 
24 | # Problem 2.
25 | library(caret)
26 | library(gbm)
27 | set.seed(3433)
28 | library(AppliedPredictiveModeling)
29 | data(AlzheimerDisease)
30 | adData <- data.frame(diagnosis, predictors)
31 | inTrain <- createDataPartition(adData$diagnosis, p=3/4)[[1]]
32 | training <- adData[inTrain, ]
33 | testing <- adData[-inTrain, ]
34 | dim(adData) # 333 131
35 | # head(adData)
36 | set.seed(62433)
37 | fitRf <- train(diagnosis ~ ., data=training, method="rf")
38 | fitGBM <- train(diagnosis ~ ., data=training, method="gbm")
39 | fitLDA <- train(diagnosis ~ ., data=training, method="lda")
40 | predRf <- predict(fitRf, testing)
41 | predGBM <- predict(fitGBM, testing)
42 | predLDA <- predict(fitLDA, testing)
43 | pred <- data.frame(predRf, predGBM, predLDA, diagnosis=testing$diagnosis)
44 | # Stack the predictions together using random forests ("rf")
45 | fit <- train(diagnosis ~ ., data=pred, method="rf")
46 | predFit <- predict(fit, pred) # predict on the stacked data frame, which holds the predRf/predGBM/predLDA columns the combiner was trained on
47 | c1 <- confusionMatrix(predRf, testing$diagnosis)$overall[1]
48 | c2 <- confusionMatrix(predGBM, testing$diagnosis)$overall[1]
49 | c3 <- confusionMatrix(predLDA, testing$diagnosis)$overall[1]
50 | c4 <- confusionMatrix(predFit, testing$diagnosis)$overall[1]
51 | print(paste(c1, c2, c3, c4))
52 | # Stacked Accuracy: 0.79 is better than random forests and lda
53 | # and the same as boosting.
54 | 
55 | # Problem 3.
56 | set.seed(3523)
57 | library(AppliedPredictiveModeling)
58 | library(elasticnet)
59 | data(concrete)
60 | inTrain <- createDataPartition(concrete$CompressiveStrength,
61 |                                p=3/4)[[1]]
62 | training <- concrete[inTrain, ]
63 | testing <- concrete[-inTrain, ]
64 | set.seed(233)
65 | fit <- train(CompressiveStrength ~ ., data=training, method="lasso")
66 | fit
67 | plot.enet(fit$finalModel, xvar="penalty", use.color=T) # Cement is the last coefficient to shrink to zero
68 | 
69 | # Problem 4.
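# (Added sketch) The idea below: forecast() returns Lo.95/Hi.95 interval
# bounds, and we count how often the held-out values land inside them. A
# self-contained toy illustration of the same coverage pattern, where
# meanf() and the simulated series are stand-ins, not part of the quiz
# solution:
#   library(forecast); set.seed(1)
#   toyTrain <- ts(rnorm(100)); toyTest <- rnorm(10)
#   fc <- meanf(toyTrain, h=10, level=95)
#   mean(fc$lower[, 1] < toyTest & toyTest < fc$upper[, 1]) # coverage fraction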
70 | library(lubridate) # for the year() function below
71 | library(forecast)
72 | dat <- read.csv("./data/gaData.csv")
73 | training <- dat[year(dat$date) < 2012, ]
74 | testing <- dat[year(dat$date) > 2011, ]
75 | tstrain <- ts(training$visitsTumblr)
76 | fit <- bats(tstrain)
77 | fit
78 | pred <- forecast(fit, level=95, h=dim(testing)[1])
79 | names(data.frame(pred))
80 | predComb <- cbind(testing, data.frame(pred))
81 | names(testing)
82 | names(predComb)
83 | predComb$in95 <- (predComb$Lo.95 < predComb$visitsTumblr) &
84 |                  (predComb$visitsTumblr < predComb$Hi.95)
85 | # For how many of the testing points is the true value within the
86 | # 95% prediction interval bounds?
87 | prop.table(table(predComb$in95))[2] # 0.9617021
88 | 
89 | # Problem 5.
90 | set.seed(3523)
91 | library(AppliedPredictiveModeling)
92 | library(e1071)
93 | data(concrete)
94 | inTrain <- createDataPartition(concrete$CompressiveStrength, p=3/4)[[1]]
95 | training <- concrete[inTrain, ]
96 | testing <- concrete[-inTrain, ]
97 | set.seed(325)
98 | fit <- svm(CompressiveStrength ~ ., data=training)
99 | # OR another way, with caret:
100 | # fit <- train(CompressiveStrength ~ ., data=training, method="svmRadial")
101 | pred <- predict(fit, testing)
102 | acc <- accuracy(pred, testing$CompressiveStrength) # forecast::accuracy, loaded above
103 | acc
104 | acc[2] # RMSE 6.715009
105 | 
--------------------------------------------------------------------------------
/week2/quiz2.R:
--------------------------------------------------------------------------------
1 | # Quiz 2
2 | # Problem 1.
3 | library(AppliedPredictiveModeling)
4 | library(caret)
5 | data(AlzheimerDisease)
6 | 
7 | adData = data.frame(diagnosis, predictors)
8 | testIndex = createDataPartition(diagnosis, p=0.50, list=FALSE)
9 | training = adData[-testIndex,]
10 | testing = adData[testIndex,]
11 | ## OR, after first defining trainIndex = createDataPartition(diagnosis, p=0.50, list=FALSE):
12 | # training = adData[trainIndex,]
13 | # testing = adData[-trainIndex,]
14 | 
15 | # Problem 2.
16 | library(AppliedPredictiveModeling)
17 | data(concrete)
18 | library(caret)
19 | set.seed(975)
20 | inTrain = createDataPartition(mixtures$CompressiveStrength, p=3/4)[[1]]
21 | training = mixtures[inTrain,]
22 | testing = mixtures[-inTrain,]
23 | xnames <- colnames(concrete)[1:8]
24 | featurePlot(x=training[, xnames], y=training$CompressiveStrength, plot="pairs")
25 | # No obvious relation between the outcome and the other variables
26 | index <- seq_len(nrow(training))
27 | ggplot(data=training, aes(x=index, y=CompressiveStrength)) + geom_point() +
28 |     theme_bw()
29 | # Step-like pattern -> 4 categories
30 | library(Hmisc)
31 | cutCompressiveStrength <- cut2(training$CompressiveStrength, g=4)
32 | summary(cutCompressiveStrength)
33 | ggplot(data=training, aes(y=index, x=cutCompressiveStrength)) +
34 |     geom_boxplot() + geom_jitter(col="blue") + theme_bw()
35 | # Another way
36 | library(plyr)
37 | splitOn <- cut2(training$Age, g=4)
38 | splitOn <- mapvalues(splitOn,
39 |                      from=levels(factor(splitOn)),
40 |                      to=c("red", "blue", "yellow", "green"))
41 | plot(training$CompressiveStrength, col=as.character(splitOn))
42 | # There is a step-like pattern in the plot of outcome versus index
43 | # in the training set that isn't explained by any of the predictor
44 | # variables, so there may be a variable missing.
45 | 
46 | # Problem 3.
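# (Added illustration) Why the log transform fails here: Superplasticizer
# contains exact zeros, and log(0) is -Inf; adding 1 keeps things finite,
# but the many identical zeros stay identical, so the skew remains:
#   log(0)     # -Inf
#   log(0 + 1) # 0, repeated for every zero value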
47 | library(AppliedPredictiveModeling)
48 | data(concrete)
49 | library(caret)
50 | set.seed(975)
51 | inTrain = createDataPartition(mixtures$CompressiveStrength, p=3/4)[[1]]
52 | training = mixtures[inTrain,]
53 | testing = mixtures[-inTrain,]
54 | qplot(Superplasticizer, data=training) # OR
55 | ggplot(data=training, aes(x=Superplasticizer)) + geom_histogram() + theme_bw()
56 | # There are a large number of values that are the same, and even if
57 | # you took log(Superplasticizer + 1) they would still all be
58 | # identical, so the distribution would not be symmetric.
59 | # There are values of zero, so when you take the log() transform
60 | # those values will be -Inf.
61 | 
62 | # Problem 4.
63 | library(caret)
64 | library(AppliedPredictiveModeling)
65 | set.seed(3433)
66 | data(AlzheimerDisease)
67 | adData = data.frame(diagnosis, predictors)
68 | inTrain = createDataPartition(adData$diagnosis, p=3/4)[[1]]
69 | training = adData[inTrain,]
70 | testing = adData[-inTrain,]
71 | ss <- training[, grep('^IL', x=names(training))]
72 | preProc <- preProcess(ss, method='pca', thresh=0.9,
73 |                       outcome=training$diagnosis)
74 | preProc$rotation # 9 principal components are needed to capture 90% of the variance
75 | 
76 | # Problem 5.
77 | library(caret)
78 | library(AppliedPredictiveModeling)
79 | set.seed(3433)
80 | data(AlzheimerDisease)
81 | adData = data.frame(diagnosis, predictors)
82 | inTrain = createDataPartition(adData$diagnosis, p=3/4)[[1]]
83 | training = adData[inTrain,]
84 | testing = adData[-inTrain,]
85 | 
86 | set.seed(3433)
87 | IL <- grep("^IL", colnames(training), value=TRUE)
88 | ILpredictors <- predictors[, IL]
89 | df <- data.frame(diagnosis, ILpredictors)
90 | inTrain <- createDataPartition(df$diagnosis, p=3/4)[[1]]
91 | training <- df[inTrain, ]
92 | testing <- df[-inTrain, ]
93 | modelFit <- train(diagnosis ~ ., method="glm", data=training)
94 | predictions <- predict(modelFit, newdata=testing)
95 | C1 <- confusionMatrix(predictions, testing$diagnosis)
96 | print(C1)
97 | acc1 <- C1$overall[1]
98 | acc1 # Non-PCA Accuracy: 0.65
99 | 
100 | modelFit <- train(diagnosis ~ .,
101 |                   method="glm",
102 |                   preProcess="pca",
103 |                   data=training,
104 |                   trControl=trainControl(preProcOptions=list(thresh=0.8)))
105 | C2 <- confusionMatrix(predict(modelFit, testing), testing$diagnosis) # prediction first, reference second
106 | print(C2)
107 | acc2 <- C2$overall[1]
108 | acc2 # PCA Accuracy: 0.72
--------------------------------------------------------------------------------
/week2/week2.R:
--------------------------------------------------------------------------------
1 | ## Caret package
2 | library(caret)
3 | library(kernlab)
4 | data(spam)
5 | inTrain <- createDataPartition(y=spam$type, p=0.75, list=F)
6 | training <- spam[inTrain,]
7 | testing <- spam[-inTrain,]
8 | dim(training)
9 | 
10 | set.seed(32343)
11 | # install.packages("e1071")
12 | modelFit <- train(type ~ ., data=training, method="glm")
13 | modelFit
14 | modelFit$finalModel
15 | predictions <- predict(modelFit, newdata=testing)
16 | predictions
17 | 
18 | confusionMatrix(predictions, testing$type)
19 | 
20 | ## Data slicing
21 | set.seed(32323)
22 | # k-fold
23 | folds <- createFolds(y=spam$type, k=10, list=T, returnTrain=T)
24 | sapply(folds, length)
25 | folds[[1]][1:10]
26 | # return test sets instead of training sets
27 | set.seed(32323)
28 | folds <- createFolds(y=spam$type, k=10, list=T, returnTrain=F)
29 | sapply(folds, length)
30 | folds[[1]][1:10]
31 | # resampling
32 | set.seed(32323)
33 | folds <- createResample(y=spam$type, times=10, list=T)
34 | sapply(folds, length)
35 | folds[[1]][1:10]
36 | # time slices
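# (Added note) createTimeSlices() keeps observations in order: each training
# slice is `initialWindow` consecutive points and each test slice is the
# `horizon` points that follow, so with the settings below
# folds$train[[1]] is 1:20 and folds$test[[1]] is 21:30.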
37 | set.seed(32323)
38 | tme <- 1:1000
39 | folds <- createTimeSlices(y=tme, initialWindow=20, horizon=10)
40 | names(folds)
41 | folds$train[[1]]
42 | folds$test[[1]]
43 | 
44 | ## Training options
45 | args(train.default)
46 | # Continuous outcomes  - RMSE: root mean squared error
47 | #                      - Rsquared: R^2 from regression models
48 | # Categorical outcomes - Accuracy: fraction correct
49 | #                      - Kappa: a measure of concordance
50 | args(trainControl)
51 | # resampling methods: boot, boot632, cv, repeatedcv, LOOCV
52 | 
53 | ## Plotting predictors
54 | library(ISLR)
55 | library(ggplot2)
56 | library(caret)
57 | data(Wage)
58 | summary(Wage)
59 | inTrain <- createDataPartition(y=Wage$wage, p=0.7, list=F)
60 | training <- Wage[inTrain,]
61 | testing <- Wage[-inTrain,]
62 | dim(training)
63 | dim(testing)
64 | # feature plot
65 | featurePlot(x=training[, c("age", "education", "jobclass")],
66 |             y=training$wage, plot="pairs")
67 | qplot(age, wage, data=training)
68 | qplot(age, wage, color=jobclass, data=training)
69 | # Add regression smoothers
70 | qq <- qplot(age, wage, color=education, data=training)
71 | qq + geom_smooth(method='lm', formula=y~x)
72 | library(Hmisc)
73 | cutWage <- cut2(training$wage, g=3)
74 | table(cutWage)
75 | p1 <- qplot(cutWage, age, data=training, fill=cutWage, geom=c("boxplot"))
76 | p1
77 | p2 <- qplot(cutWage, age, data=training, fill=cutWage, geom=c("boxplot", "jitter"))
78 | library(gridExtra)
79 | library(ggplot2)
80 | grid.arrange(p1, p2, ncol=2)
81 | # Tables
82 | t1 <- table(cutWage, training$jobclass)
83 | t1
84 | prop.table(t1, 1)
85 | # Density plots
86 | qplot(wage, color=education, data=training, geom="density")
87 | # Make plots only in the training set; look for imbalance in outcomes, outliers,
88 | # groups of points not explained by a predictor, and skewed variables
89 | 
90 | ## Preprocessing
91 | library(caret)
92 | library(kernlab)
93 | data(spam)
94 | inTrain <- createDataPartition(y=spam$type, p=0.75, list=F)
95 | training <- spam[inTrain,]
96 | testing <- spam[-inTrain,]
97 | dim(training)
98 | hist(training$capitalAve, main="", xlab="ave. 
capital run length") 99 | mean(training$capitalAve) 100 | sd(training$capitalAve) 101 | # Standardizing 102 | trainCapAve <- training$capitalAve 103 | trainCapAveS <- (trainCapAve - mean(trainCapAve)) / sd(trainCapAve) 104 | mean(trainCapAveS) 105 | sd(trainCapAveS) 106 | # Standardizing testing 107 | testCapAve <- testing$capitalAve 108 | testCapAveS <- (testCapAve - mean(trainCapAve)) / sd(trainCapAve) 109 | mean(testCapAveS) 110 | sd(testCapAveS) 111 | 112 | preObj <- preProcess(training[,-58], method=c("center", "scale")) 113 | trainCapAveS <- predict(preObj, training[,-58])$capitalAve 114 | mean(trainCapAveS) 115 | sd(trainCapAveS) 116 | testCapAveS <- predict(preObj, testing[,-58])$capitalAve 117 | mean(testCapAveS) 118 | sd(testCapAveS) 119 | set.seed(32343) 120 | modelFit <- train(type~., data=training, 121 | preProcess=c("center", "scale"), method="glm") 122 | modelFit 123 | # standardizing - Box-cox transform 124 | preObj <- preProcess(training[,-58], method=c("BoxCox")) 125 | trainCapAveS <- predict(preObj, training[,-58])$capitalAve 126 | par(mfrow=c(1, 2)) 127 | hist(trainCapAveS) 128 | qqnorm(trainCapAveS) 129 | # standardizing - Imputing data 130 | set.seed(13343) 131 | training$capAve <- training$capitalAve 132 | selectNA <- rbinom(dim(training)[1], size=1, prob=0.05)==1 133 | training$capAve[selectNA] <- NA 134 | preObj <- preProcess(training[,-58], method="knnImpute") 135 | library(RANN) 136 | capAve <- predict(preObj, training[,-58])$capAve 137 | capAveTruth <- training$capitalAve 138 | capAveTruth <- (capAveTruth - mean(capAveTruth)) / sd(capAveTruth) 139 | quantile(capAve - capAveTruth) 140 | quantile((capAve - capAveTruth)[selectNA]) 141 | quantile((capAve - capAveTruth)[!selectNA]) 142 | -------------------------------------------------------------------------------- /week4/data/gaData.csv: -------------------------------------------------------------------------------- 1 | "","date","visitsTumblr" 2 | "1",2011-01-01,0 3 | "2",2011-01-02,0 4 | "3",2011-01-03,0 5 | "4",2011-01-04,0 6 | "5",2011-01-05,0 7 | "6",2011-01-06,0 8 | "7",2011-01-07,0 9 | "8",2011-01-08,0 10 | "9",2011-01-09,0 11 | "10",2011-01-10,0 12 | "11",2011-01-11,0 13 | "12",2011-01-12,0 14 | "13",2011-01-13,0 15 | "14",2011-01-14,0 16 | "15",2011-01-15,0 17 | "16",2011-01-16,0 18 | "17",2011-01-17,0 19 | "18",2011-01-18,0 20 | "19",2011-01-19,0 21 | "20",2011-01-20,0 22 | "21",2011-01-21,0 23 | "22",2011-01-22,0 24 | "23",2011-01-23,0 25 | "24",2011-01-24,0 26 | "25",2011-01-25,0 27 | "26",2011-01-26,0 28 | "27",2011-01-27,0 29 | "28",2011-01-28,0 30 | "29",2011-01-29,0 31 | "30",2011-01-30,0 32 | "31",2011-01-31,0 33 | "32",2011-02-01,0 34 | "33",2011-02-02,0 35 | "34",2011-02-03,0 36 | "35",2011-02-04,0 37 | "36",2011-02-05,0 38 | "37",2011-02-06,0 39 | "38",2011-02-07,0 40 | "39",2011-02-08,0 41 | "40",2011-02-09,0 42 | "41",2011-02-10,0 43 | "42",2011-02-11,0 44 | "43",2011-02-12,0 45 | "44",2011-02-13,0 46 | "45",2011-02-14,0 47 | "46",2011-02-15,0 48 | "47",2011-02-16,0 49 | "48",2011-02-17,0 50 | "49",2011-02-18,0 51 | "50",2011-02-19,0 52 | "51",2011-02-20,0 53 | "52",2011-02-21,0 54 | "53",2011-02-22,0 55 | "54",2011-02-23,0 56 | "55",2011-02-24,0 57 | "56",2011-02-25,0 58 | "57",2011-02-26,0 59 | "58",2011-02-27,0 60 | "59",2011-02-28,0 61 | "60",2011-03-01,0 62 | "61",2011-03-02,0 63 | "62",2011-03-03,0 64 | "63",2011-03-04,0 65 | "64",2011-03-05,0 66 | "65",2011-03-06,0 67 | "66",2011-03-07,0 68 | "67",2011-03-08,0 69 | "68",2011-03-09,0 70 | "69",2011-03-10,0 71 | "70",2011-03-11,0 72 | 
"71",2011-03-12,0 73 | "72",2011-03-13,0 74 | "73",2011-03-14,0 75 | "74",2011-03-15,0 76 | "75",2011-03-16,0 77 | "76",2011-03-17,0 78 | "77",2011-03-18,0 79 | "78",2011-03-19,0 80 | "79",2011-03-20,0 81 | "80",2011-03-21,0 82 | "81",2011-03-22,0 83 | "82",2011-03-23,0 84 | "83",2011-03-24,0 85 | "84",2011-03-25,0 86 | "85",2011-03-26,0 87 | "86",2011-03-27,0 88 | "87",2011-03-28,0 89 | "88",2011-03-29,0 90 | "89",2011-03-30,0 91 | "90",2011-03-31,0 92 | "91",2011-04-01,0 93 | "92",2011-04-02,0 94 | "93",2011-04-03,0 95 | "94",2011-04-04,0 96 | "95",2011-04-05,0 97 | "96",2011-04-06,0 98 | "97",2011-04-07,0 99 | "98",2011-04-08,0 100 | "99",2011-04-09,0 101 | "100",2011-04-10,0 102 | "101",2011-04-11,0 103 | "102",2011-04-12,0 104 | "103",2011-04-13,0 105 | "104",2011-04-14,0 106 | "105",2011-04-15,0 107 | "106",2011-04-16,0 108 | "107",2011-04-17,0 109 | "108",2011-04-18,0 110 | "109",2011-04-19,0 111 | "110",2011-04-20,0 112 | "111",2011-04-21,0 113 | "112",2011-04-22,0 114 | "113",2011-04-23,0 115 | "114",2011-04-24,0 116 | "115",2011-04-25,0 117 | "116",2011-04-26,0 118 | "117",2011-04-27,0 119 | "118",2011-04-28,0 120 | "119",2011-04-29,0 121 | "120",2011-04-30,0 122 | "121",2011-05-01,0 123 | "122",2011-05-02,0 124 | "123",2011-05-03,0 125 | "124",2011-05-04,0 126 | "125",2011-05-05,0 127 | "126",2011-05-06,0 128 | "127",2011-05-07,0 129 | "128",2011-05-08,0 130 | "129",2011-05-09,0 131 | "130",2011-05-10,0 132 | "131",2011-05-11,0 133 | "132",2011-05-12,0 134 | "133",2011-05-13,0 135 | "134",2011-05-14,0 136 | "135",2011-05-15,0 137 | "136",2011-05-16,0 138 | "137",2011-05-17,0 139 | "138",2011-05-18,0 140 | "139",2011-05-19,0 141 | "140",2011-05-20,0 142 | "141",2011-05-21,0 143 | "142",2011-05-22,0 144 | "143",2011-05-23,0 145 | "144",2011-05-24,0 146 | "145",2011-05-25,0 147 | "146",2011-05-26,0 148 | "147",2011-05-27,0 149 | "148",2011-05-28,0 150 | "149",2011-05-29,0 151 | "150",2011-05-30,0 152 | "151",2011-05-31,0 153 | "152",2011-06-01,0 154 | "153",2011-06-02,0 155 | "154",2011-06-03,0 156 | "155",2011-06-04,0 157 | "156",2011-06-05,0 158 | "157",2011-06-06,0 159 | "158",2011-06-07,0 160 | "159",2011-06-08,0 161 | "160",2011-06-09,0 162 | "161",2011-06-10,0 163 | "162",2011-06-11,0 164 | "163",2011-06-12,0 165 | "164",2011-06-13,0 166 | "165",2011-06-14,0 167 | "166",2011-06-15,0 168 | "167",2011-06-16,0 169 | "168",2011-06-17,0 170 | "169",2011-06-18,0 171 | "170",2011-06-19,0 172 | "171",2011-06-20,0 173 | "172",2011-06-21,0 174 | "173",2011-06-22,0 175 | "174",2011-06-23,0 176 | "175",2011-06-24,0 177 | "176",2011-06-25,0 178 | "177",2011-06-26,0 179 | "178",2011-06-27,0 180 | "179",2011-06-28,0 181 | "180",2011-06-29,0 182 | "181",2011-06-30,0 183 | "182",2011-07-01,0 184 | "183",2011-07-02,0 185 | "184",2011-07-03,0 186 | "185",2011-07-04,0 187 | "186",2011-07-05,0 188 | "187",2011-07-06,0 189 | "188",2011-07-07,0 190 | "189",2011-07-08,0 191 | "190",2011-07-09,0 192 | "191",2011-07-10,0 193 | "192",2011-07-11,0 194 | "193",2011-07-12,0 195 | "194",2011-07-13,0 196 | "195",2011-07-14,0 197 | "196",2011-07-15,0 198 | "197",2011-07-16,0 199 | "198",2011-07-17,0 200 | "199",2011-07-18,0 201 | "200",2011-07-19,0 202 | "201",2011-07-20,0 203 | "202",2011-07-21,0 204 | "203",2011-07-22,0 205 | "204",2011-07-23,0 206 | "205",2011-07-24,0 207 | "206",2011-07-25,0 208 | "207",2011-07-26,0 209 | "208",2011-07-27,0 210 | "209",2011-07-28,0 211 | "210",2011-07-29,0 212 | "211",2011-07-30,0 213 | "212",2011-07-31,0 214 | "213",2011-08-01,0 215 | "214",2011-08-02,0 216 | 
"215",2011-08-03,0 217 | "216",2011-08-04,0 218 | "217",2011-08-05,0 219 | "218",2011-08-06,0 220 | "219",2011-08-07,0 221 | "220",2011-08-08,0 222 | "221",2011-08-09,0 223 | "222",2011-08-10,0 224 | "223",2011-08-11,0 225 | "224",2011-08-12,0 226 | "225",2011-08-13,0 227 | "226",2011-08-14,0 228 | "227",2011-08-15,0 229 | "228",2011-08-16,0 230 | "229",2011-08-17,0 231 | "230",2011-08-18,0 232 | "231",2011-08-19,0 233 | "232",2011-08-20,0 234 | "233",2011-08-21,0 235 | "234",2011-08-22,0 236 | "235",2011-08-23,0 237 | "236",2011-08-24,0 238 | "237",2011-08-25,0 239 | "238",2011-08-26,0 240 | "239",2011-08-27,0 241 | "240",2011-08-28,0 242 | "241",2011-08-29,0 243 | "242",2011-08-30,0 244 | "243",2011-08-31,0 245 | "244",2011-09-01,0 246 | "245",2011-09-02,0 247 | "246",2011-09-03,0 248 | "247",2011-09-04,0 249 | "248",2011-09-05,0 250 | "249",2011-09-06,0 251 | "250",2011-09-07,0 252 | "251",2011-09-08,10 253 | "252",2011-09-09,14 254 | "253",2011-09-10,10 255 | "254",2011-09-11,8 256 | "255",2011-09-12,95 257 | "256",2011-09-13,44 258 | "257",2011-09-14,15 259 | "258",2011-09-15,15 260 | "259",2011-09-16,34 261 | "260",2011-09-17,26 262 | "261",2011-09-18,17 263 | "262",2011-09-19,63 264 | "263",2011-09-20,35 265 | "264",2011-09-21,45 266 | "265",2011-09-22,50 267 | "266",2011-09-23,82 268 | "267",2011-09-24,35 269 | "268",2011-09-25,29 270 | "269",2011-09-26,52 271 | "270",2011-09-27,53 272 | "271",2011-09-28,118 273 | "272",2011-09-29,93 274 | "273",2011-09-30,58 275 | "274",2011-10-01,56 276 | "275",2011-10-02,52 277 | "276",2011-10-03,47 278 | "277",2011-10-04,250 279 | "278",2011-10-05,121 280 | "279",2011-10-06,91 281 | "280",2011-10-07,93 282 | "281",2011-10-08,41 283 | "282",2011-10-09,52 284 | "283",2011-10-10,580 285 | "284",2011-10-11,230 286 | "285",2011-10-12,119 287 | "286",2011-10-13,99 288 | "287",2011-10-14,109 289 | "288",2011-10-15,252 290 | "289",2011-10-16,97 291 | "290",2011-10-17,176 292 | "291",2011-10-18,196 293 | "292",2011-10-19,145 294 | "293",2011-10-20,128 295 | "294",2011-10-21,142 296 | "295",2011-10-22,81 297 | "296",2011-10-23,64 298 | "297",2011-10-24,108 299 | "298",2011-10-25,68 300 | "299",2011-10-26,77 301 | "300",2011-10-27,138 302 | "301",2011-10-28,89 303 | "302",2011-10-29,57 304 | "303",2011-10-30,60 305 | "304",2011-10-31,69 306 | "305",2011-11-01,184 307 | "306",2011-11-02,356 308 | "307",2011-11-03,118 309 | "308",2011-11-04,214 310 | "309",2011-11-05,145 311 | "310",2011-11-06,114 312 | "311",2011-11-07,191 313 | "312",2011-11-08,131 314 | "313",2011-11-09,157 315 | "314",2011-11-10,632 316 | "315",2011-11-11,758 317 | "316",2011-11-12,191 318 | "317",2011-11-13,140 319 | "318",2011-11-14,305 320 | "319",2011-11-15,313 321 | "320",2011-11-16,378 322 | "321",2011-11-17,306 323 | "322",2011-11-18,251 324 | "323",2011-11-19,142 325 | "324",2011-11-20,164 326 | "325",2011-11-21,178 327 | "326",2011-11-22,175 328 | "327",2011-11-23,261 329 | "328",2011-11-24,320 330 | "329",2011-11-25,142 331 | "330",2011-11-26,124 332 | "331",2011-11-27,386 333 | "332",2011-11-28,4997 334 | "333",2011-11-29,1372 335 | "334",2011-11-30,657 336 | "335",2011-12-01,562 337 | "336",2011-12-02,420 338 | "337",2011-12-03,229 339 | "338",2011-12-04,156 340 | "339",2011-12-05,255 341 | "340",2011-12-06,262 342 | "341",2011-12-07,253 343 | "342",2011-12-08,304 344 | "343",2011-12-09,202 345 | "344",2011-12-10,90 346 | "345",2011-12-11,76 347 | "346",2011-12-12,232 348 | "347",2011-12-13,212 349 | "348",2011-12-14,306 350 | "349",2011-12-15,196 351 | "350",2011-12-16,319 
352 | "351",2011-12-17,185 353 | "352",2011-12-18,377 354 | "353",2011-12-19,307 355 | "354",2011-12-20,270 356 | "355",2011-12-21,334 357 | "356",2011-12-22,295 358 | "357",2011-12-23,120 359 | "358",2011-12-24,74 360 | "359",2011-12-25,55 361 | "360",2011-12-26,75 362 | "361",2011-12-27,81 363 | "362",2011-12-28,159 364 | "363",2011-12-29,121 365 | "364",2011-12-30,72 366 | "365",2011-12-31,59 367 | "366",2012-01-01,68 368 | "367",2012-01-02,152 369 | "368",2012-01-03,332 370 | "369",2012-01-04,199 371 | "370",2012-01-05,223 372 | "371",2012-01-06,261 373 | "372",2012-01-07,132 374 | "373",2012-01-08,277 375 | "374",2012-01-09,313 376 | "375",2012-01-10,255 377 | "376",2012-01-11,263 378 | "377",2012-01-12,741 379 | "378",2012-01-13,442 380 | "379",2012-01-14,122 381 | "380",2012-01-15,155 382 | "381",2012-01-16,250 383 | "382",2012-01-17,203 384 | "383",2012-01-18,198 385 | "384",2012-01-19,221 386 | "385",2012-01-20,244 387 | "386",2012-01-21,107 388 | "387",2012-01-22,221 389 | "388",2012-01-23,265 390 | "389",2012-01-24,1250 391 | "390",2012-01-25,550 392 | "391",2012-01-26,360 393 | "392",2012-01-27,307 394 | "393",2012-01-28,164 395 | "394",2012-01-29,261 396 | "395",2012-01-30,331 397 | "396",2012-01-31,392 398 | "397",2012-02-01,292 399 | "398",2012-02-02,240 400 | "399",2012-02-03,254 401 | "400",2012-02-04,171 402 | "401",2012-02-05,205 403 | "402",2012-02-06,372 404 | "403",2012-02-07,220 405 | "404",2012-02-08,250 406 | "405",2012-02-09,369 407 | "406",2012-02-10,380 408 | "407",2012-02-11,262 409 | "408",2012-02-12,449 410 | "409",2012-02-13,740 411 | "410",2012-02-14,470 412 | "411",2012-02-15,464 413 | "412",2012-02-16,367 414 | "413",2012-02-17,247 415 | "414",2012-02-18,139 416 | "415",2012-02-19,200 417 | "416",2012-02-20,298 418 | "417",2012-02-21,286 419 | "418",2012-02-22,253 420 | "419",2012-02-23,613 421 | "420",2012-02-24,354 422 | "421",2012-02-25,205 423 | "422",2012-02-26,214 424 | "423",2012-02-27,344 425 | "424",2012-02-28,315 426 | "425",2012-02-29,452 427 | "426",2012-03-01,486 428 | "427",2012-03-02,457 429 | "428",2012-03-03,217 430 | "429",2012-03-04,402 431 | "430",2012-03-05,406 432 | "431",2012-03-06,277 433 | "432",2012-03-07,296 434 | "433",2012-03-08,730 435 | "434",2012-03-09,489 436 | "435",2012-03-10,201 437 | "436",2012-03-11,230 438 | "437",2012-03-12,332 439 | "438",2012-03-13,333 440 | "439",2012-03-14,466 441 | "440",2012-03-15,228 442 | "441",2012-03-16,304 443 | "442",2012-03-17,160 444 | "443",2012-03-18,448 445 | "444",2012-03-19,451 446 | "445",2012-03-20,706 447 | "446",2012-03-21,373 448 | "447",2012-03-22,342 449 | "448",2012-03-23,502 450 | "449",2012-03-24,232 451 | "450",2012-03-25,328 452 | "451",2012-03-26,662 453 | "452",2012-03-27,392 454 | "453",2012-03-28,261 455 | "454",2012-03-29,336 456 | "455",2012-03-30,326 457 | "456",2012-03-31,149 458 | "457",2012-04-01,158 459 | "458",2012-04-02,221 460 | "459",2012-04-03,255 461 | "460",2012-04-04,281 462 | "461",2012-04-05,218 463 | "462",2012-04-06,189 464 | "463",2012-04-07,126 465 | "464",2012-04-08,189 466 | "465",2012-04-09,514 467 | "466",2012-04-10,389 468 | "467",2012-04-11,628 469 | "468",2012-04-12,444 470 | "469",2012-04-13,773 471 | "470",2012-04-14,317 472 | "471",2012-04-15,317 473 | "472",2012-04-16,400 474 | "473",2012-04-17,309 475 | "474",2012-04-18,388 476 | "475",2012-04-19,337 477 | "476",2012-04-20,243 478 | "477",2012-04-21,174 479 | "478",2012-04-22,266 480 | "479",2012-04-23,386 481 | "480",2012-04-24,333 482 | "481",2012-04-25,411 483 | 
"482",2012-04-26,405 484 | "483",2012-04-27,844 485 | "484",2012-04-28,693 486 | "485",2012-04-29,683 487 | "486",2012-04-30,1081 488 | "487",2012-05-01,666 489 | "488",2012-05-02,622 490 | "489",2012-05-03,461 491 | "490",2012-05-04,464 492 | "491",2012-05-05,279 493 | "492",2012-05-06,313 494 | "493",2012-05-07,1065 495 | "494",2012-05-08,541 496 | "495",2012-05-09,385 497 | "496",2012-05-10,466 498 | "497",2012-05-11,596 499 | "498",2012-05-12,266 500 | "499",2012-05-13,403 501 | "500",2012-05-14,1376 502 | "501",2012-05-15,654 503 | "502",2012-05-16,509 504 | "503",2012-05-17,333 505 | "504",2012-05-18,355 506 | "505",2012-05-19,172 507 | "506",2012-05-20,458 508 | "507",2012-05-21,465 509 | "508",2012-05-22,421 510 | "509",2012-05-23,307 511 | "510",2012-05-24,581 512 | "511",2012-05-25,303 513 | "512",2012-05-26,146 514 | "513",2012-05-27,231 515 | "514",2012-05-28,314 516 | "515",2012-05-29,417 517 | "516",2012-05-30,753 518 | "517",2012-05-31,444 519 | "518",2012-06-01,628 520 | "519",2012-06-02,236 521 | "520",2012-06-03,238 522 | "521",2012-06-04,436 523 | "522",2012-06-05,355 524 | "523",2012-06-06,283 525 | "524",2012-06-07,243 526 | "525",2012-06-08,188 527 | "526",2012-06-09,104 528 | "527",2012-06-10,191 529 | "528",2012-06-11,539 530 | "529",2012-06-12,320 531 | "530",2012-06-13,254 532 | "531",2012-06-14,189 533 | "532",2012-06-15,287 534 | "533",2012-06-16,118 535 | "534",2012-06-17,336 536 | "535",2012-06-18,647 537 | "536",2012-06-19,386 538 | "537",2012-06-20,415 539 | "538",2012-06-21,297 540 | "539",2012-06-22,641 541 | "540",2012-06-23,217 542 | "541",2012-06-24,398 543 | "542",2012-06-25,558 544 | "543",2012-06-26,892 545 | "544",2012-06-27,551 546 | "545",2012-06-28,612 547 | "546",2012-06-29,1199 548 | "547",2012-06-30,246 549 | "548",2012-07-01,251 550 | "549",2012-07-02,399 551 | "550",2012-07-03,394 552 | "551",2012-07-04,317 553 | "552",2012-07-05,327 554 | "553",2012-07-06,367 555 | "554",2012-07-07,153 556 | "555",2012-07-08,197 557 | "556",2012-07-09,440 558 | "557",2012-07-10,289 559 | "558",2012-07-11,745 560 | "559",2012-07-12,947 561 | "560",2012-07-13,462 562 | "561",2012-07-14,286 563 | "562",2012-07-15,289 564 | "563",2012-07-16,354 565 | "564",2012-07-17,728 566 | "565",2012-07-18,1296 567 | "566",2012-07-19,1343 568 | "567",2012-07-20,707 569 | "568",2012-07-21,473 570 | "569",2012-07-22,619 571 | "570",2012-07-23,935 572 | "571",2012-07-24,929 573 | "572",2012-07-25,823 574 | "573",2012-07-26,722 575 | "574",2012-07-27,651 576 | "575",2012-07-28,368 577 | "576",2012-07-29,392 578 | "577",2012-07-30,717 579 | "578",2012-07-31,564 580 | "579",2012-08-01,460 581 | "580",2012-08-02,361 582 | "581",2012-08-03,370 583 | "582",2012-08-04,222 584 | "583",2012-08-05,267 585 | "584",2012-08-06,550 586 | "585",2012-08-07,532 587 | "586",2012-08-08,548 588 | "587",2012-08-09,549 589 | "588",2012-08-10,852 590 | "589",2012-08-11,560 591 | "590",2012-08-12,536 592 | "591",2012-08-13,678 593 | "592",2012-08-14,1047 594 | "593",2012-08-15,711 595 | "594",2012-08-16,480 596 | "595",2012-08-17,782 597 | "596",2012-08-18,365 598 | "597",2012-08-19,310 599 | "598",2012-08-20,719 600 | "599",2012-08-21,843 601 | "600",2012-08-22,634 602 | --------------------------------------------------------------------------------