├── .Rhistory ├── LICENSE.md ├── README.md ├── dates.txt ├── disease_names.txt ├── inf_dis.txt ├── infreq.txt ├── location_names.txt ├── old_new_merge.R ├── oldplotdat.txt ├── pi_names.txt ├── plotdat.txt ├── server.R ├── ui.R ├── url_names.R ├── urldat.txt └── week.csv /.Rhistory: -------------------------------------------------------------------------------- 1 | levels(d$Disease)[41] <- "Strep toxic shock synd" 2 | levels(d$Disease)[42] <- "Syphilis congenital <1yr" 3 | levels(d$Disease)[42] <- "Toxic shock synd staph" 4 | levels(d$Disease)[47] <- "Vanco Interm Staph A" 5 | levels(d$Disease)[48] <- "Vanco Resist Staph A" 6 | d$threshold[is.na(d$threshold)]<-d$c[is.na(d$threshold)] 7 | d$alert[is.na(d$alert)]<-"N" 8 | write.table(d, file="infreq.txt", row.names=FALSE, col.names=TRUE) 9 | write.table(unique(d$Disease), file="inf_dis.txt",row.names=FALSE, col.names=TRUE) 10 | shiny::runApp('Documents/PhD/Spring 15/mmwr scrape/CDCPlot') 11 | getwd() 12 | setwd("/home/nick/Documents/PhD/Spring 15/mmwr scrape/CDCPlot"") 13 | setwd("/home/nick/Documents/PhD/Spring 15/mmwr scrape/CDCPlot") 14 | all <- matrix(c( 15 | c("Cryptosporidiosis","Cryptosporidiosis", "b36e-ru3r", "2014"), 16 | c("Cryptosporidiosis","Cryptosporidiosis", "9n3x-apcd", "2015"), 17 | c("Salmonellosis", "Salmonellosis", "52cr-rw4k", "2014"), 18 | c("Salmonellosis", "Salmonellosis", "d6kj-devz", "2105"), 19 | c("Shigellosis","Shigellosis", "52cr-rw4k", "2014"), 20 | c("Shigellosis","Shigellosis", "n3wf-wtep", "2015"), 21 | c("Pertussis","Pertussis", "8rkx-vimh", "2014"), 22 | c("Pertussis","Pertussis", "d69q-iyrb", "2015"), 23 | c("Malaria","Malaria", "y6uv-t34t", "2014"), 24 | c("Malaria","Malaria", "7pb7-w9us", "2015"), 25 | c("Legionellosis","Legionellosis", "23gt-ssfe", "2014"), 26 | c("Legionellosis","Legionellosis", "ydsy-yh5w", "2015"), 27 | c("Hepatitis A", "Hepatitis..viral..acute...type.A","rg4j-6mcc", "2014"), 28 | c("Hepatitis A", "Hepatitis..viral..acute...type.A","65xe-6neq", "2015"), 29 
| c("Hepatitis B, Acute", "Hepatitis..viral..acute...type.B","rg4j-6mcc","2014"), 30 | c("Hepatitis B, Acute", "Hepatitis..viral..acute...type.B","65xe-6neq","2015"), 31 | c("Hepatitis C, Acute", "Hepatitis..viral..acute...type.C","rg4j-6mcc","2014"), 32 | c("Hepatitis C, Acute", "Hepatitis..viral..acute...type.C","65xe-6neq","2015"), 33 | c("Giardiasis", "Giardiasis", "9ix3-ryt6","2014"), 34 | c("Giardiasis", "Giardiasis", "mpdg-hf57","2015"), 35 | c("Meningococcal Disease Invasive (all serogroups)", "Meningococcal.disease..invasive...All.serogroups", "y6uv-t34t","2014"), 36 | c("Meningococcal Disease Invasive (all serogroups)", "Meningococcal.disease..invasive...All.serogroups", "7pb7-w9us","2015"), 37 | c("Mumps", "Mumps", "8rkx-vimh","2014"), 38 | c("Mumps", "Mumps", "d69q-iyrb","2015"), 39 | #c("Pneumonia and Influenza Mortality Reports by City/Region, 2014", "P.I..Total","qpap-3u8w"), 40 | #leave out pneumonia for now, format is too different 41 | c("Shiga toxin-producing E. coli (STEC)", "Shiga.toxin.producing.E..coli..STEC..", "52cr-rw4k","2014"), 42 | c("Shiga toxin-producing E. coli (STEC)", "Shiga.toxin.producing.E..coli..STEC..", "n3wf-wtep","2015"), 43 | c("P&I MORT", "P&I MORT", "qpap-3u8w", "2014"), 44 | c("P&I MORT", "P&I MORT", "7esm-uptm", "2015") 45 | ) 46 | ,ncol=4, byrow=T) 47 | #Name matrix columns and write to csv file 48 | URL_NAMES <- data.frame(display_name=all[,1],data_name=all[,2],url=all[,3],year=all[,4]) 49 | write.table(URL_NAMES, file="urldat.txt", row.names=FALSE, col.names=TRUE) 50 | # Read in data with disease names and corresponding urls. This data is created from the url_names.R file, which should be run first. 51 | urldat <- read.table("urldat.txt", header=T) 52 | # A function to help deal with NA values when calculating thresholds. NA's occur when we try to 53 | # calculate running standard deviations with only one data point, and cause an error in the cumsum function. 
54 | # Args: 55 | # x: A vector of disease occurance data that we wish to calculate an alert threshold for 56 | # days: an integer for the number of days to calculate the threshold over 57 | newthresh <- function(x,days){ 58 | thresh <-runmean(x, days,endrule="NA")+2*runsd(x, days,endrule="sd",align="right") 59 | thresh[is.na(thresh)]<-x[is.na(thresh)] 60 | return(thresh) 61 | } 62 | # This function takes each url and corresponding disease name and gets data from CDC. It then combines multiple years worth of data, 63 | # calculates alert thresholds and cumulative sums and returns the columns of interest from the CDC data. 64 | # Args: 65 | # url_data: the rows of the url_data.txt file which contain the urls for a given disease 66 | url_func <- function(url_data){ 67 | # Construct actual CDC website url name and get data for 2014 and 2015 68 | curl <- url_data$url 69 | URL <- paste( "https://data.cdc.gov/api/views/",curl, "/rows.csv?accessType=DOWNLOAD",sep="") 70 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 71 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 72 | # Some diseases have a slightly different name for MMWR.Week and MMWR.Year, so we standardize the names here 73 | if("MMWRWeek"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWRWeek )} 74 | if("MMWR.WEEK"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWR.WEEK )} 75 | if("MMWRYear"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWRYear )} 76 | if("MMWR.YEAR"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWR.YEAR )} 77 | if("MMWRWeek"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWRWeek )} 78 | if("MMWR.WEEK"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWR.WEEK )} 79 | if("MMWRYear"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWRYear )} 80 | 
if("MMWR.YEAR"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWR.YEAR )} 81 | # dname is the name of the column in the nndss file which contains weekly data for the disease of interest 82 | dname <- c(paste(url_data$data_name[1],"..Current.week",sep="")) 83 | #special column name for P&I mortality data 84 | if(url_data$data_name[1]=="P&I MORT")dname <- "P.I..Total" 85 | # Select relevant columns from both the 2014 and 2015 data and rbind them together 86 | nndss <- rbind(select(nndss14, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag")), 87 | select(nndss15, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag"))) 88 | # set NA values to 0, maybe not a great idea, but useful for calculating thresholds and cumulative sums 89 | names(nndss)[which(dname==names(nndss))] <- "c" 90 | nndss$c <- as.numeric(nndss$c) 91 | nndss$c[is.na(nndss$c)]<-0 92 | nndss$display_name <- url_data$display_name[1] 93 | # Create columns for 10 and 14 week thresholds and 10 and 14 week alerts, grouping by reporting area. 
94 | nndss <- nndss %>% group_by(Reporting.Area) %>% mutate(fourteenwk.thresh=newthresh(c,14), 95 | tenwk.thresh=newthresh(c,10), 96 | fourteenwk.alert=c>fourteenwk.thresh, 97 | tenwk.alert=c>tenwk.thresh) 98 | # Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year 99 | nndss <- group_by(nndss, Reporting.Area, MMWR.Year) %>% mutate(cumulate=cumsum(c), 100 | cumu10=cumulate+(tenwk.thresh-c), 101 | cumu14=cumulate+(fourteenwk.thresh-c)) 102 | #select and return relevant columns of data table 103 | nndss<- select(nndss, one_of("c","Reporting.Area", "MMWR.Year", "MMWR.Week","display_name"), contains("thresh"),contains("cumu"),contains("alert")) 104 | return(nndss) 105 | } 106 | # Run the url_func function for each different disease name in our urldat.txt data file 107 | output <- ddply(urldat, .(data_name), url_func) 108 | Encoding(output$Reporting.Area) <- "latin1" 109 | output$Reporting.Area <- iconv(output$Reporting.Area, "latin1", "ASCII", sub="") 110 | # Write output as plotdat.csv 111 | write.table(output, file="plotdat.txt", row.names=FALSE, col.names=TRUE) 112 | # Separate output file which contains all disease names called disease_names.csv 113 | write.table(unique(output$display_name), file="disease_names.txt", row.names=FALSE, col.names=TRUE) 114 | # Separate output file which contains locations and location types (state, region, or country) called location_names.cd, doesn't include p&i data 115 | regions <-c("NEW ENGLAND", "MID. ATLANTIC", "E.N. CENTRAL", "W.N. CENTRAL", "S. ATLANTIC", 116 | "E.S. CENTRAL", "W.S. CENTRAL", "MOUNTAIN", "PACIFIC", "TERRITORIES") 117 | loc_type <- rep("state", length(unique(output$Reporting.Area[output$data_name!="P&I MORT"]))) 118 | loc_type[which(unique(output$Reporting.Area[output$data_name!="P&I MORT"])%in%regions)] <- "region" 119 | loc_type[1] <- "country" 120 | # Also include, for state locations, which region the state falls under. 
Thankfully, the CDC data table is ordered so that it first lists a region, then 121 | # all the states in that region, then the next region, and so on. So, between each region name, all states will be in the same region 122 | region_num=0 123 | loc_reg <- rep("NONE", length(loc_type)) 124 | for(i in 1:62){ 125 | if(loc_type[i]=="region"){ 126 | region_num = region_num+1 127 | } 128 | if(loc_type[i]=="state"){ 129 | loc_reg[i]=regions[region_num] 130 | } 131 | } 132 | loc_reg[63:67] <- "TERRITORIES" 133 | all_locs<-data.frame(location=unique(output$Reporting.Area[output$data_name!="P&I MORT"]),type=loc_type, region=loc_reg) 134 | write.table(all_locs, file="location_names.txt", row.names=FALSE, col.names=TRUE) 135 | pi_loc <- rep("city", length(unique(output$Reporting.Area[output$data_name=="P&I MORT"]))) 136 | pi_loc[which(tolower(unique(output$Reporting.Area[output$data_name=="P&I MORT"]))%in%tolower(regions))] <- "region" 137 | pi_loc[length(pi_loc)] <- "country" 138 | region_num=0 139 | pi_reg <- rep("NONE", length(pi_loc)) 140 | for(i in 1:length(pi_loc)){ 141 | if(pi_loc[i]=="region"){ 142 | region_num = region_num+1 143 | } 144 | if(pi_loc[i]=="city"){ 145 | pi_reg[i]=regions[region_num] 146 | } 147 | } 148 | pi_locs<-data.frame(location=unique(output$Reporting.Area[output$data_name=="P&I MORT"]),type=pi_loc, region=pi_reg) 149 | pi_locs <- data.frame(lapply(pi_locs, as.character), stringsAsFactors=FALSE) 150 | write.table(pi_locs, file="pi_names.txt", row.names=FALSE, col.names=TRUE) 151 | #separate code for infrequent diseases. 
152 | URL <- c("https://data.cdc.gov/api/views/wcwi-x3uk/rows.csv?accessType=DOWNLOAD", 153 | "https://data.cdc.gov/api/views/pb4z-432k/rows.csv?accessType=DOWNLOAD") 154 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 155 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 156 | nndss <- rbind(select(nndss14, contains("Current.week"), contains("MMWR"), contains("Disease"), -contains("flag")), 157 | select(nndss15, contains("Current.week"),contains("MMWR"), contains("Disease"), -contains("flag"))) 158 | #disease names are different bewteen years, try to clean some disease names up 159 | Encoding(nndss$Disease) <- "latin1" 160 | nndss$Disease <- iconv(nndss$Disease, "latin1", "ASCII", sub="") 161 | nndss$Disease <- gsub(":","",nndss$Disease) 162 | nndss$Disease <- gsub(",","",nndss$Disease) 163 | nndss$Disease <- gsub("\\*","",nndss$Disease) 164 | #remove all disease names which aren't present in both years 165 | nndss <- nndss[-which(nndss$Disease%in%names(which(table(nndss$Disease)<54))),] 166 | d <- nndss %>% group_by(Disease, MMWR.week, MMWR.year)%>%summarise(c=Current.week) 167 | d$c <- as.numeric(d$c) 168 | d <- d %>% mutate(c = ifelse(is.na(c),0,c)) 169 | d <- d %>% group_by(Disease) %>% mutate("fourteenweekmean"=runmean(c, 2, align="right")) 170 | d <- d %>% mutate(fourteenweekmean = ifelse(is.na(fourteenweekmean),0,fourteenweekmean)) 171 | d <- d%>% group_by(Disease) %>% mutate ("fourteenweeksd"=runsd(c, 14,center=runmean(c,14),align="right")) 172 | d$sd2 <- d$fourteenweeksd*2 173 | d$threshold <- d$fourteenweekmean + d$sd2 174 | d$alert <- ifelse(d$c > d$threshold, "Y", "N") 175 | d$Disease <- as.factor(d$Disease) 176 | levels(d$Disease)[3] <- "Arbo,EEE" 177 | levels(d$Disease)[2] <- "Arbo,CA serogroup" 178 | levels(d$Disease)[4] <- "Arbo,Powassan" 179 | levels(d$Disease)[5] <- "Arbo,St Louis" 180 | levels(d$Disease)[6] <- "Arbo,WEE" 181 | 
levels(d$Disease)[9] <- "Botulism other" 182 | levels(d$Disease)[14] <- "Cyclosporiasis" 183 | levels(d$Disease)[16] <- "H flu <5 non-b" 184 | levels(d$Disease)[17] <- "H flu <5 b" 185 | levels(d$Disease)[18] <- "H flu <5 unknown" 186 | levels(d$Disease)[19] <- "Hansen Disease" 187 | levels(d$Disease)[20] <- "HUS,postdiarrheal" 188 | levels(d$Disease)[21] <- "HBV,perinatal" 189 | levels(d$Disease)[22] <- "Influenza ped mort" 190 | levels(d$Disease)[25] <- "Measles" 191 | levels(d$Disease)[26] <- "Mening a,c,y,w-135" 192 | levels(d$Disease)[27] <- "Mening other" 193 | levels(d$Disease)[28] <- "Mening serogroup b" 194 | levels(d$Disease)[29] <- "Mening unknown" 195 | levels(d$Disease)[30] <- "Novel influenza A" 196 | levels(d$Disease)[32] <- "Polio nonparalytic" 197 | levels(d$Disease)[34] <- "Psittacosis" 198 | levels(d$Disease)[37] <- "Q Fever, Total" 199 | levels(d$Disease)[39] <- "SARS-CoV" 200 | levels(d$Disease)[40] <- "Smallpox" 201 | levels(d$Disease)[41] <- "Strep toxic shock synd" 202 | levels(d$Disease)[42] <- "Syphilis congenital <1yr" 203 | levels(d$Disease)[42] <- "Toxic shock synd staph" 204 | levels(d$Disease)[47] <- "Vanco Interm Staph A" 205 | levels(d$Disease)[48] <- "Vanco Resist Staph A" 206 | d$threshold[is.na(d$threshold)]<-d$c[is.na(d$threshold)] 207 | d$alert[is.na(d$alert)]<-"N" 208 | write.table(d, file="infreq.txt", row.names=FALSE, col.names=TRUE) 209 | write.table(unique(d$Disease), file="inf_dis.txt",row.names=FALSE, col.names=TRUE) 210 | shiny::runApp() 211 | pi_names <- read.table("pi_names.txt", header=T,colClasses=c("character","character")) 212 | pi_names$region 213 | shiny::runApp() 214 | sort(filter(pi_names, type=="region")$location) 215 | sort(filter(location_names, type=="region")$location) 216 | location_names <- read.table("location_names.txt", header=T, colClasses=c("character","character")) 217 | sort(filter(location_names, type=="region")$location) 218 | shiny::runApp() 219 | ?toupper 220 | 
toupper(pi_names$location[which(pi_names$type=="region")]) 221 | pi_names$location[which(pi_names$type=="region")] 222 | shiny::runApp() 223 | pi_names$type 224 | pi_names$location 225 | pi_names$location[which(pi_names$type=="region")]<- toupper(pi_names$location[which(pi_names$type=="region")]) 226 | pi_names$location 227 | filter(cdcdata, display_name == "P&I MORT", Reporting.Area %in% pi_names$location[which(pi_names$type=="region")]) 228 | cdcdata <- read.table("plotdat.txt", header=T) 229 | filter(cdcdata, display_name == "P&I MORT", Reporting.Area %in% pi_names$location[which(pi_names$type=="region")]) 230 | pi_names$location[which(pi_names$type=="region")] 231 | unique(cdcdata$Reporting.Area) 232 | pi_names <- read.table("pi_names.txt", header=T,colClasses=c("character","character")) 233 | pi_names 234 | shiny::runApp() 235 | cdcdata$Reporting.Area 236 | location_names 237 | toupper(cdcdata$Reporting.Area[toupper(cdcdata$Reporting.Area)%in%location_names$region]) 238 | shiny::runApp() 239 | infreq <- read.table("infreq.txt", header=T) 240 | infreq 241 | head(infreq) 242 | head() 243 | head(d) 244 | d <- d %>% group_by(Disease, MMWR.Year) %>% mutate(cumulate=cumsum(c), 245 | cumu14=cumulate+(threshold-c)) 246 | ?mutate 247 | d <- d %>% group_by(Disease, MMWR.year) %>% mutate(cumulate=cumsum(c), 248 | cumu14=cumulate+(threshold-c)) 249 | d$Disease <- as.factor(d$Disease) 250 | levels(d$Disease)[3] <- "Arbo,EEE" 251 | levels(d$Disease)[2] <- "Arbo,CA serogroup" 252 | levels(d$Disease)[4] <- "Arbo,Powassan" 253 | levels(d$Disease)[5] <- "Arbo,St Louis" 254 | levels(d$Disease)[6] <- "Arbo,WEE" 255 | levels(d$Disease)[9] <- "Botulism other" 256 | levels(d$Disease)[14] <- "Cyclosporiasis" 257 | levels(d$Disease)[16] <- "H flu <5 non-b" 258 | levels(d$Disease)[17] <- "H flu <5 b" 259 | levels(d$Disease)[18] <- "H flu <5 unknown" 260 | levels(d$Disease)[19] <- "Hansen Disease" 261 | levels(d$Disease)[20] <- "HUS,postdiarrheal" 262 | levels(d$Disease)[21] <- 
"HBV,perinatal" 263 | levels(d$Disease)[22] <- "Influenza ped mort" 264 | levels(d$Disease)[25] <- "Measles" 265 | levels(d$Disease)[26] <- "Mening a,c,y,w-135" 266 | levels(d$Disease)[27] <- "Mening other" 267 | levels(d$Disease)[28] <- "Mening serogroup b" 268 | levels(d$Disease)[29] <- "Mening unknown" 269 | levels(d$Disease)[30] <- "Novel influenza A" 270 | levels(d$Disease)[32] <- "Polio nonparalytic" 271 | levels(d$Disease)[34] <- "Psittacosis" 272 | levels(d$Disease)[37] <- "Q Fever, Total" 273 | levels(d$Disease)[39] <- "SARS-CoV" 274 | levels(d$Disease)[40] <- "Smallpox" 275 | levels(d$Disease)[41] <- "Strep toxic shock synd" 276 | levels(d$Disease)[42] <- "Syphilis congenital <1yr" 277 | levels(d$Disease)[42] <- "Toxic shock synd staph" 278 | levels(d$Disease)[47] <- "Vanco Interm Staph A" 279 | levels(d$Disease)[48] <- "Vanco Resist Staph A" 280 | d$threshold[is.na(d$threshold)]<-d$c[is.na(d$threshold)] 281 | d$alert[is.na(d$alert)]<-"N" 282 | write.table(d, file="infreq.txt", row.names=FALSE, col.names=TRUE) 283 | write.table(unique(d$Disease), file="inf_dis.txt",row.names=FALSE, col.names=TRUE) 284 | shiny::runApp() 285 | d <- nndss %>% group_by(Disease, MMWR.week, MMWR.year)%>%summarise(c=Current.week) 286 | d$c <- as.numeric(d$c) 287 | d <- d %>% mutate(c = ifelse(is.na(c),0,c)) 288 | d <- d %>% group_by(Disease) %>% mutate("fourteenweekmean"=runmean(c, 14, align="right")) 289 | d <- d %>% mutate(fourteenweekmean = ifelse(is.na(fourteenweekmean),0,fourteenweekmean)) 290 | d <- d%>% group_by(Disease) %>% mutate ("fourteenweeksd"=runsd(c, 14,center=runmean(c,14),align="right")) 291 | d$sd2 <- d$fourteenweeksd*2 292 | d$threshold <- d$fourteenweekmean + d$sd2 293 | d$alert <- ifelse(d$c > d$threshold, "Y", "N") 294 | d <- d %>% group_by(Disease, MMWR.year) %>% mutate(cumulate=cumsum(c), 295 | cumu14=cumulate+(threshold-c)) 296 | d$Disease <- as.factor(d$Disease) 297 | levels(d$Disease)[3] <- "Arbo,EEE" 298 | levels(d$Disease)[2] <- "Arbo,CA serogroup" 
299 | levels(d$Disease)[4] <- "Arbo,Powassan" 300 | levels(d$Disease)[5] <- "Arbo,St Louis" 301 | levels(d$Disease)[6] <- "Arbo,WEE" 302 | levels(d$Disease)[9] <- "Botulism other" 303 | levels(d$Disease)[14] <- "Cyclosporiasis" 304 | levels(d$Disease)[16] <- "H flu <5 non-b" 305 | levels(d$Disease)[17] <- "H flu <5 b" 306 | levels(d$Disease)[18] <- "H flu <5 unknown" 307 | levels(d$Disease)[19] <- "Hansen Disease" 308 | levels(d$Disease)[20] <- "HUS,postdiarrheal" 309 | levels(d$Disease)[21] <- "HBV,perinatal" 310 | levels(d$Disease)[22] <- "Influenza ped mort" 311 | levels(d$Disease)[25] <- "Measles" 312 | levels(d$Disease)[26] <- "Mening a,c,y,w-135" 313 | levels(d$Disease)[27] <- "Mening other" 314 | levels(d$Disease)[28] <- "Mening serogroup b" 315 | levels(d$Disease)[29] <- "Mening unknown" 316 | levels(d$Disease)[30] <- "Novel influenza A" 317 | levels(d$Disease)[32] <- "Polio nonparalytic" 318 | levels(d$Disease)[34] <- "Psittacosis" 319 | levels(d$Disease)[37] <- "Q Fever, Total" 320 | levels(d$Disease)[39] <- "SARS-CoV" 321 | levels(d$Disease)[40] <- "Smallpox" 322 | levels(d$Disease)[41] <- "Strep toxic shock synd" 323 | levels(d$Disease)[42] <- "Syphilis congenital <1yr" 324 | levels(d$Disease)[42] <- "Toxic shock synd staph" 325 | levels(d$Disease)[47] <- "Vanco Interm Staph A" 326 | levels(d$Disease)[48] <- "Vanco Resist Staph A" 327 | d$threshold[is.na(d$threshold)]<-d$c[is.na(d$threshold)] 328 | d$alert[is.na(d$alert)]<-"N" 329 | write.table(d, file="infreq.txt", row.names=FALSE, col.names=TRUE) 330 | write.table(unique(d$Disease), file="inf_dis.txt",row.names=FALSE, col.names=TRUE) 331 | shiny::runApp() 332 | pi_names 333 | shiny::runApp() 334 | ?h4 335 | shiny::runApp() 336 | shiny::runApp() 337 | URL <- "https://ibis.health.state.nm.us/resources/MMWRWeekCalendar.html" 338 | getURL(URL,ssl.verifypeer=FALSE)) 339 | getURL(URL,ssl.verifypeer=FALSE) 340 | library(XML) 341 | readHTMLTable(URL) 342 | htmlParse(URL) 343 | URL <- 
"https://ibis.health.state.nm.us/resources/MMWRWeekCalendar.html" 344 | htmlParse(URL) 345 | getURL(URL) 346 | dates <-getURL(URL) 347 | readHTMLTable(dates) 348 | htmlParse(dates) 349 | readHTMLTable(dates) 350 | readHTMLTable(dates)$'NULL' 351 | str(readHTMLTable(dates)) 352 | readHTMLTable(dates)[[7]] 353 | URL <- "https://ibis.health.state.nm.us/resources/MMWRWeekCalendar.html" 354 | dates <-getURL(URL) 355 | dates <- readHTMLTable(dates)[[7]] 356 | str(dates) 357 | library(lubridate) 358 | ?readHTMLTable 359 | dates <- readHTMLTable(dates, header=T)[[7]] 360 | dates <-getURL(URL) 361 | dates <- readHTMLTable(dates, header=T)[[7]] 362 | dates 363 | names(dates) <- c("MMWR.Week", "2011", "2012", "2013", "2014", "2015") 364 | dates 365 | dates <- dates[-1,] 366 | dates 367 | dates$2014 368 | names(dates) <- c("MMWR.Week", "y2011", "y2012", "y2013", "y2014", "y2015") 369 | dates$y2014 370 | ?which 371 | getwd() 372 | write.table(dates, file="dates.txt", row.names=FALSE, col.names=TRUE) 373 | dates <- read.table("dates.txt", header=T) 374 | dates 375 | newthresh <- function(x,days){ 376 | thresh <-runmean(x, days,endrule="NA")+2*runsd(x, days,endrule="sd",align="right") 377 | thresh[is.na(thresh)]<-x[is.na(thresh)] 378 | return(thresh) 379 | } 380 | # This function takes each url and corresponding disease name and gets data from CDC. It then combines multiple years worth of data, 381 | # calculates alert thresholds and cumulative sums and returns the columns of interest from the CDC data. 
382 | # Args: 383 | # url_data: the rows of the url_data.txt file which contain the urls for a given disease 384 | url_func <- function(url_data){ 385 | # Construct actual CDC website url name and get data for 2014 and 2015 386 | curl <- url_data$url 387 | URL <- paste( "https://data.cdc.gov/api/views/",curl, "/rows.csv?accessType=DOWNLOAD",sep="") 388 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 389 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 390 | # Some diseases have a slightly different name for MMWR.Week and MMWR.Year, so we standardize the names here 391 | if("MMWRWeek"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWRWeek )} 392 | if("MMWR.WEEK"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWR.WEEK )} 393 | if("MMWRYear"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWRYear )} 394 | if("MMWR.YEAR"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWR.YEAR )} 395 | if("MMWRWeek"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWRWeek )} 396 | if("MMWR.WEEK"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWR.WEEK )} 397 | if("MMWRYear"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWRYear )} 398 | if("MMWR.YEAR"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWR.YEAR )} 399 | # dname is the name of the column in the nndss file which contains weekly data for the disease of interest 400 | dname <- c(paste(url_data$data_name[1],"..Current.week",sep="")) 401 | #special column name for P&I mortality data 402 | if(url_data$data_name[1]=="P&I MORT")dname <- "P.I..Total" 403 | # Select relevant columns from both the 2014 and 2015 data and rbind them together 404 | nndss <- rbind(select(nndss14, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag")), 405 | select(nndss15, contains(dname), 
contains("MMWR"), contains("Reporting"), -contains("flag"))) 406 | # set NA values to 0, maybe not a great idea, but useful for calculating thresholds and cumulative sums 407 | names(nndss)[which(dname==names(nndss))] <- "c" 408 | nndss$c <- as.numeric(nndss$c) 409 | nndss$c[is.na(nndss$c)]<-0 410 | nndss$display_name <- url_data$display_name[1] 411 | # Create columns for 10 and 14 week thresholds and 10 and 14 week alerts, grouping by reporting area. 412 | nndss <- nndss %>% group_by(Reporting.Area) %>% mutate(fourteenwk.thresh=newthresh(c,14), 413 | tenwk.thresh=newthresh(c,10), 414 | fourteenwk.alert=c>fourteenwk.thresh, 415 | tenwk.alert=c>tenwk.thresh) 416 | # Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year 417 | nndss <- group_by(nndss, Reporting.Area, MMWR.Year) %>% mutate(cumulate=cumsum(c), 418 | cumu10=cumulate+(tenwk.thresh-c), 419 | cumu14=cumulate+(fourteenwk.thresh-c)) 420 | #select and return relevant columns of data table 421 | nndss<- select(nndss, one_of("c","Reporting.Area", "MMWR.Year", "MMWR.Week","display_name"), contains("thresh"),contains("cumu"),contains("alert")) 422 | nndss$date <- apply(nndss,1, function(x){return(dates[which(dates$MMWR.Week==x$MMWR.Week),5+(x$MMWR.Year-2014)])} 423 | return(nndss) 424 | } 425 | # Run the url_func function for each different disease name in our urldat.txt data file 426 | output <- ddply(urldat, .(data_name), url_func) 427 | url_func <- function(url_data){ 428 | # Construct actual CDC website url name and get data for 2014 and 2015 429 | curl <- url_data$url 430 | URL <- paste( "https://data.cdc.gov/api/views/",curl, "/rows.csv?accessType=DOWNLOAD",sep="") 431 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 432 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 433 | # Some diseases have a slightly different name for 
MMWR.Week and MMWR.Year, so we standardize the names here 434 | if("MMWRWeek"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWRWeek )} 435 | if("MMWR.WEEK"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWR.WEEK )} 436 | if("MMWRYear"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWRYear )} 437 | if("MMWR.YEAR"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWR.YEAR )} 438 | if("MMWRWeek"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWRWeek )} 439 | if("MMWR.WEEK"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWR.WEEK )} 440 | if("MMWRYear"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWRYear )} 441 | if("MMWR.YEAR"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWR.YEAR )} 442 | # dname is the name of the column in the nndss file which contains weekly data for the disease of interest 443 | dname <- c(paste(url_data$data_name[1],"..Current.week",sep="")) 444 | #special column name for P&I mortality data 445 | if(url_data$data_name[1]=="P&I MORT")dname <- "P.I..Total" 446 | # Select relevant columns from both the 2014 and 2015 data and rbind them together 447 | nndss <- rbind(select(nndss14, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag")), 448 | select(nndss15, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag"))) 449 | # set NA values to 0, maybe not a great idea, but useful for calculating thresholds and cumulative sums 450 | names(nndss)[which(dname==names(nndss))] <- "c" 451 | nndss$c <- as.numeric(nndss$c) 452 | nndss$c[is.na(nndss$c)]<-0 453 | nndss$display_name <- url_data$display_name[1] 454 | # Create columns for 10 and 14 week thresholds and 10 and 14 week alerts, grouping by reporting area. 
455 | nndss <- nndss %>% group_by(Reporting.Area) %>% mutate(fourteenwk.thresh=newthresh(c,14), 456 | tenwk.thresh=newthresh(c,10), 457 | fourteenwk.alert=c>fourteenwk.thresh, 458 | tenwk.alert=c>tenwk.thresh) 459 | # Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year 460 | nndss <- group_by(nndss, Reporting.Area, MMWR.Year) %>% mutate(cumulate=cumsum(c), 461 | cumu10=cumulate+(tenwk.thresh-c), 462 | cumu14=cumulate+(fourteenwk.thresh-c)) 463 | #select and return relevant columns of data table 464 | nndss<- select(nndss, one_of("c","Reporting.Area", "MMWR.Year", "MMWR.Week","display_name"), contains("thresh"),contains("cumu"),contains("alert")) 465 | nndss$date <- apply(nndss,1, function(x){return(dates[which(dates$MMWR.Week==x$MMWR.Week),5+(x$MMWR.Year-2014)])}) 466 | return(nndss) 467 | } 468 | output <- ddply(urldat, .(data_name), url_func) 469 | dates$MMWR.Week 470 | ?apply 471 | url_func <- function(url_data){ 472 | # Construct actual CDC website url name and get data for 2014 and 2015 473 | curl <- url_data$url 474 | URL <- paste( "https://data.cdc.gov/api/views/",curl, "/rows.csv?accessType=DOWNLOAD",sep="") 475 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 476 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 477 | # Some diseases have a slightly different name for MMWR.Week and MMWR.Year, so we standardize the names here 478 | if("MMWRWeek"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWRWeek )} 479 | if("MMWR.WEEK"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14,MMWR.Week=MMWR.WEEK )} 480 | if("MMWRYear"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWRYear )} 481 | if("MMWR.YEAR"%in%names(nndss14)){nndss14<- dplyr::rename(nndss14, MMWR.Year=MMWR.YEAR )} 482 | if("MMWRWeek"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, 
MMWR.Week=MMWRWeek )} 483 | if("MMWR.WEEK"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Week=MMWR.WEEK )} 484 | if("MMWRYear"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWRYear )} 485 | if("MMWR.YEAR"%in%names(nndss15)){nndss15<- dplyr::rename(nndss15, MMWR.Year=MMWR.YEAR )} 486 | # dname is the name of the column in the nndss file which contains weekly data for the disease of interest 487 | dname <- c(paste(url_data$data_name[1],"..Current.week",sep="")) 488 | #special column name for P&I mortality data 489 | if(url_data$data_name[1]=="P&I MORT")dname <- "P.I..Total" 490 | # Select relevant columns from both the 2014 and 2015 data and rbind them together 491 | nndss <- rbind(select(nndss14, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag")), 492 | select(nndss15, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag"))) 493 | # set NA values to 0, maybe not a great idea, but useful for calculating thresholds and cumulative sums 494 | names(nndss)[which(dname==names(nndss))] <- "c" 495 | nndss$c <- as.numeric(nndss$c) 496 | nndss$c[is.na(nndss$c)]<-0 497 | nndss$display_name <- url_data$display_name[1] 498 | # Create columns for 10 and 14 week thresholds and 10 and 14 week alerts, grouping by reporting area. 
499 | nndss <- nndss %>% group_by(Reporting.Area) %>% mutate(fourteenwk.thresh=newthresh(c,14), 500 | tenwk.thresh=newthresh(c,10), 501 | fourteenwk.alert=c>fourteenwk.thresh, 502 | tenwk.alert=c>tenwk.thresh) 503 | # Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year 504 | nndss <- group_by(nndss, Reporting.Area, MMWR.Year) %>% mutate(cumulate=cumsum(c), 505 | cumu10=cumulate+(tenwk.thresh-c), 506 | cumu14=cumulate+(fourteenwk.thresh-c)) 507 | #select and return relevant columns of data table 508 | nndss<- select(nndss, one_of("c","Reporting.Area", "MMWR.Year", "MMWR.Week","display_name"), contains("thresh"),contains("cumu"),contains("alert")) 509 | nndss$date <- apply(nndss,1, function(x){return(dates[which(datesMMWR.Week==x['MMWR.Week']),5+(x['MMWR.Year']-2014)])}) 510 | return(nndss) 511 | } 512 | output <- ddply(urldat, .(data_name), url_func) 513 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) 
You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. 
To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 
86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. 
But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. 
(This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. 
These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 
214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. 
If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | {description} 294 | Copyright (C) {year} {fullname} 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 
305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | {signature of Ty Coon}, 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 
340 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # WeeklyCDCPlot 2 | Contains R files for making a Shiny app which displays CDC data by state and disease. 3 | 4 | -url_names.R creates urldat.csv, which contains the names of each disease of interest, and the url location for the data of that disease 5 | 6 | -old_new_merge.R uses the urldat.csv to scrape the data from the CDC website, and creates three .csv files: 7 | - plotdat.csv, which is the disease data scraped from the CDC 8 | -location_names.csv, which is a file with the names of all locations we have data for, and the type of each location (state, region, country) 9 | -disease_names.csv, which is a file with the names of all diseases we have data for 10 | Note: the last two .csv files are used only to help create the user interface for the shiny app 11 | 12 | 13 | -ui.R and server.R are the two .R files which run the shiny app using runApp() 14 | -------------------------------------------------------------------------------- /dates.txt: -------------------------------------------------------------------------------- 1 | "MMWR.Week" "y2011" "y2012" "y2013" "y2014" "y2015" "y2016" "y2017" 2 | "1" 1 "2011-01-08" "2012-01-07" "2013-01-05" "2014-01-04" "2015-01-10" "2016-01-09" "2017-01-07" 3 | "2" 2 "2011-01-15" "2012-01-14" "2013-01-12" "2014-01-11" "2015-01-17" "2016-01-16" "2017-01-14" 4 | "3" 3 "2011-01-22" "2012-01-21" "2013-01-19" "2014-01-18" "2015-01-24" "2016-01-23" "2017-01-21" 5 | "4" 4 "2011-01-29" "2012-01-28" "2013-01-26" "2014-01-25" "2015-01-31" "2016-01-30" "2017-01-28" 6 | "5" 5 "2011-02-05" "2012-02-04" "2013-02-02" "2014-02-01" "2015-02-07" "2016-02-06" "2017-02-04" 7 | "6" 6 "2011-02-12" "2012-02-11" "2013-02-09" "2014-02-08" "2015-02-14" "2016-02-13" "2017-02-11" 8 | "7" 7 "2011-02-19" "2012-02-18" "2013-02-16" "2014-02-15" "2015-02-21" "2016-02-20" 
"2017-02-18" 9 | "8" 8 "2011-02-26" "2012-02-25" "2013-02-23" "2014-02-22" "2015-02-28" "2016-02-27" "2017-02-25" 10 | "9" 9 "2011-03-05" "2012-03-03" "2013-03-02" "2014-03-01" "2015-03-07" "2016-03-05" "2017-03-04" 11 | "10" 10 "2011-03-12" "2012-03-10" "2013-03-09" "2014-03-08" "2015-03-14" "2016-03-12" "2017-03-11" 12 | "11" 11 "2011-03-19" "2012-03-17" "2013-03-16" "2014-03-15" "2015-03-21" "2016-03-19" "2017-03-18" 13 | "12" 12 "2011-03-26" "2012-03-24" "2013-03-23" "2014-03-22" "2015-03-28" "2016-03-26" "2017-03-25" 14 | "13" 13 "2011-04-02" "2012-03-31" "2013-03-30" "2014-03-29" "2015-04-04" "2016-04-02" "2017-04-01" 15 | "14" 14 "2011-04-09" "2012-04-07" "2013-04-06" "2014-04-05" "2015-04-11" "2016-04-09" "2017-04-08" 16 | "15" 15 "2011-04-16" "2012-04-14" "2013-04-13" "2014-04-12" "2015-04-18" "2016-04-16" "2017-04-15" 17 | "16" 16 "2011-04-23" "2012-04-21" "2013-04-20" "2014-04-19" "2015-04-25" "2016-04-23" "2017-04-22" 18 | "17" 17 "2011-04-30" "2012-04-28" "2013-04-27" "2014-04-26" "2015-05-02" "2016-04-30" "2017-04-29" 19 | "18" 18 "2011-05-07" "2012-05-05" "2013-05-04" "2014-05-03" "2015-05-09" "2016-05-07" "2017-05-06" 20 | "19" 19 "2011-05-14" "2012-05-12" "2013-05-11" "2014-05-10" "2015-05-16" "2016-05-14" "2017-05-13" 21 | "20" 20 "2011-05-21" "2012-05-19" "2013-05-18" "2014-05-17" "2015-05-23" "2016-05-21" "2017-05-20" 22 | "21" 21 "2011-05-28" "2012-05-26" "2013-05-25" "2014-05-24" "2015-05-30" "2016-05-28" "2017-05-27" 23 | "22" 22 "2011-06-04" "2012-06-02" "2013-06-01" "2014-05-31" "2015-06-06" "2016-06-04" "2017-06-03" 24 | "23" 23 "2011-06-11" "2012-06-09" "2013-06-08" "2014-06-07" "2015-06-13" "2016-06-11" "2017-06-10" 25 | "24" 24 "2011-06-18" "2012-06-16" "2013-06-15" "2014-06-14" "2015-06-20" "2016-06-18" "2017-06-17" 26 | "25" 25 "2011-06-25" "2012-06-23" "2013-06-22" "2014-06-21" "2015-06-27" "2016-06-25" "2017-06-24" 27 | "26" 26 "2011-07-02" "2012-06-30" "2013-06-29" "2014-06-28" "2015-07-04" "2016-07-02" "2017-07-01" 28 | "27" 27 
"2011-07-09" "2012-07-07" "2013-07-06" "2014-07-05" "2015-07-11" "2016-07-09" "2017-07-08" 29 | "28" 28 "2011-07-16" "2012-07-14" "2013-07-13" "2014-07-12" "2015-07-18" "2016-07-16" "2017-07-15" 30 | "29" 29 "2011-07-23" "2012-07-21" "2013-07-20" "2014-07-19" "2015-07-25" "2016-07-23" "2017-07-22" 31 | "30" 30 "2011-07-30" "2012-07-28" "2013-07-27" "2014-07-26" "2015-08-01" "2016-07-30" "2017-07-29" 32 | "31" 31 "2011-08-06" "2012-08-04" "2013-08-03" "2014-08-02" "2015-08-08" "2016-08-06" "2017-08-05" 33 | "32" 32 "2011-08-13" "2012-08-11" "2013-08-10" "2014-08-09" "2015-08-15" "2016-08-13" "2017-08-12" 34 | "33" 33 "2011-08-20" "2012-08-18" "2013-08-17" "2014-08-16" "2015-08-22" "2016-08-20" "2017-08-19" 35 | "34" 34 "2011-08-27" "2012-08-25" "2013-08-24" "2014-08-23" "2015-08-29" "2016-08-27" "2017-08-26" 36 | "35" 35 "2011-09-03" "2012-09-01" "2013-08-31" "2014-08-30" "2015-09-05" "2016-09-03" "2017-09-02" 37 | "36" 36 "2011-09-10" "2012-09-08" "2013-09-07" "2014-09-06" "2015-09-12" "2016-09-10" "2017-09-09" 38 | "37" 37 "2011-09-17" "2012-09-15" "2013-09-14" "2014-09-13" "2015-09-19" "2016-09-17" "2017-09-16" 39 | "38" 38 "2011-09-24" "2012-09-22" "2013-09-21" "2014-09-20" "2015-09-26" "2016-09-24" "2017-09-23" 40 | "39" 39 "2011-10-01" "2012-09-29" "2013-09-28" "2014-09-27" "2015-10-03" "2016-10-01" "2017-09-30" 41 | "40" 40 "2011-10-08" "2012-10-06" "2013-10-05" "2014-10-04" "2015-10-10" "2016-10-08" "2017-10-07" 42 | "41" 41 "2011-10-15" "2012-10-13" "2013-10-12" "2014-10-11" "2015-10-17" "2016-10-15" "2017-10-14" 43 | "42" 42 "2011-10-22" "2012-10-20" "2013-10-19" "2014-10-18" "2015-10-24" "2016-10-22" "2017-10-21" 44 | "43" 43 "2011-10-29" "2012-10-27" "2013-10-26" "2014-10-25" "2015-10-31" "2016-10-29" "2017-10-28" 45 | "44" 44 "2011-11-05" "2012-11-03" "2013-11-02" "2014-11-01" "2015-11-07" "2016-11-05" "2017-11-04" 46 | "45" 45 "2011-11-12" "2012-11-10" "2013-11-09" "2014-11-08" "2015-11-14" "2016-11-12" "2017-11-11" 47 | "46" 46 "2011-11-19" 
"2012-11-17" "2013-11-16" "2014-11-15" "2015-11-21" "2016-11-19" "2017-11-18" 48 | "47" 47 "2011-11-26" "2012-11-24" "2013-11-23" "2014-11-22" "2015-11-28" "2016-11-26" "2017-11-25" 49 | "48" 48 "2011-12-03" "2012-12-01" "2013-11-30" "2014-11-29" "2015-12-05" "2016-12-03" "2017-12-02" 50 | "49" 49 "2011-12-10" "2012-12-08" "2013-12-07" "2014-12-06" "2015-12-12" "2016-12-10" "2017-12-09" 51 | "50" 50 "2011-12-17" "2012-12-15" "2013-12-14" "2014-12-13" "2015-12-19" "2016-12-17" "2017-12-16" 52 | "51" 51 "2011-12-24" "2012-12-22" "2013-12-21" "2014-12-20" "2015-12-26" "2016-12-24" "2017-12-23" 53 | "52" 52 "2011-12-31" "2012-12-29" "2013-12-28" "2014-12-27" "2016-01-02" "2016-12-31" "2017-12-30" 54 | "53" 53 NA NA NA "2015-01-03" NA "" "" 55 | -------------------------------------------------------------------------------- /disease_names.txt: -------------------------------------------------------------------------------- 1 | "x" 2 | "Cryptosporidiosis" 3 | "Giardiasis" 4 | "Hepatitis A" 5 | "Hepatitis B, Acute" 6 | "Hepatitis C, Acute" 7 | "Legionellosis" 8 | "Malaria" 9 | "Meningococcal Disease Invasive (all serogroups)" 10 | "Mumps" 11 | "P&I MORT" 12 | "Pertussis" 13 | "Salmonellosis" 14 | "Shiga toxin-producing E. 
coli (STEC)" 15 | "Shigellosis" 16 | -------------------------------------------------------------------------------- /inf_dis.txt: -------------------------------------------------------------------------------- 1 | "x" 2 | "Anthrax" 3 | "Arbo,CA serogroup" 4 | "Arbo,EEE" 5 | "Arbo,Powassan" 6 | "Arbo,St Louis" 7 | "Arbo,WEE" 8 | "Botulism foodborne" 9 | "Botulism infant" 10 | "Botulism other" 11 | "Botulism total" 12 | "Brucellosis" 13 | "Chancroid" 14 | "Cholera" 15 | "Cyclosporiasis" 16 | "Diphtheria" 17 | "H flu <5 non-b" 18 | "H flu <5 b" 19 | "H flu <5 unknown" 20 | "Hansen Disease" 21 | "HUS,postdiarrheal" 22 | "HBV,perinatal" 23 | "Influenza ped mort" 24 | "Leptospirosis" 25 | "Listeriosis" 26 | "Measles" 27 | "Mening a,c,y,w-135" 28 | "Mening other" 29 | "Mening serogroup b" 30 | "Mening unknown" 31 | "Novel influenza A" 32 | "Plague" 33 | "Polio nonparalytic" 34 | "Poliomyelitis paralytic" 35 | "Psittacosis" 36 | "Q fever acute" 37 | "Q fever chronic" 38 | "Q Fever, Total" 39 | "Rabies human" 40 | "SARS-CoV" 41 | "Smallpox" 42 | "Strep toxic shock synd" 43 | "Toxic shock synd staph" 44 | "Toxic-shock syndrome (staphylococcal)" 45 | "Trichinellosis" 46 | "Tularemia" 47 | "Typhoid fever" 48 | "Vanco Interm Staph A" 49 | "Vanco Resist Staph A" 50 | "Viral hemorrhagic fever Crimean-Congo hemorrhagic fever" 51 | "Viral hemorrhagic fever Ebola hemorrhagic fever" 52 | "Viral hemorrhagic fever Guanarito hemorrhagic fever" 53 | "Viral hemorrhagic fever Junin hemorrhagic fever" 54 | "Viral hemorrhagic fever Lassa fever" 55 | "Viral hemorrhagic fever Lujo virus" 56 | "Viral hemorrhagic fever Machupo hemorrhagic fever" 57 | "Viral hemorrhagic fever Marburg fever" 58 | "Viral hemorrhagic fever Sabia-associated hemorrhagic fever" 59 | "Yellow fever" 60 | -------------------------------------------------------------------------------- /location_names.txt: -------------------------------------------------------------------------------- 1 | "location" "type" "region" 2 | 
"UNITED STATES" "country" "NONE" 3 | "NEW ENGLAND" "region" "NONE" 4 | "CONNECTICUT" "state" "NEW ENGLAND" 5 | "MAINE" "state" "NEW ENGLAND" 6 | "MASSACHUSETTS" "state" "NEW ENGLAND" 7 | "NEW HAMPSHIRE" "state" "NEW ENGLAND" 8 | "RHODE ISLAND" "state" "NEW ENGLAND" 9 | "VERMONT" "state" "NEW ENGLAND" 10 | "MID. ATLANTIC" "region" "NONE" 11 | "NEW JERSEY" "state" "MID. ATLANTIC" 12 | "NEW YORK" "state" "MID. ATLANTIC" 13 | "NEW YORK CITY" "state" "MID. ATLANTIC" 14 | "PENNSYLVANIA" "state" "MID. ATLANTIC" 15 | "E.N. CENTRAL" "region" "NONE" 16 | "ILLINOIS" "state" "E.N. CENTRAL" 17 | "INDIANA" "state" "E.N. CENTRAL" 18 | "MICHIGAN" "state" "E.N. CENTRAL" 19 | "OHIO" "state" "E.N. CENTRAL" 20 | "WISCONSIN" "state" "E.N. CENTRAL" 21 | "W.N. CENTRAL" "region" "NONE" 22 | "IOWA" "state" "W.N. CENTRAL" 23 | "KANSAS" "state" "W.N. CENTRAL" 24 | "MINNESOTA" "state" "W.N. CENTRAL" 25 | "MISSOURI" "state" "W.N. CENTRAL" 26 | "NEBRASKA" "state" "W.N. CENTRAL" 27 | "NORTH DAKOTA" "state" "W.N. CENTRAL" 28 | "SOUTH DAKOTA" "state" "W.N. CENTRAL" 29 | "S. ATLANTIC" "region" "NONE" 30 | "DELAWARE" "state" "S. ATLANTIC" 31 | "DIST. OF COL." "state" "S. ATLANTIC" 32 | "FLORIDA" "state" "S. ATLANTIC" 33 | "GEORGIA" "state" "S. ATLANTIC" 34 | "MARYLAND" "state" "S. ATLANTIC" 35 | "NORTH CAROLINA" "state" "S. ATLANTIC" 36 | "SOUTH CAROLINA" "state" "S. ATLANTIC" 37 | "VIRGINIA" "state" "S. ATLANTIC" 38 | "WEST VIRGINIA" "state" "S. ATLANTIC" 39 | "E.S. CENTRAL" "region" "NONE" 40 | "ALABAMA" "state" "E.S. CENTRAL" 41 | "KENTUCKY" "state" "E.S. CENTRAL" 42 | "MISSISSIPPI" "state" "E.S. CENTRAL" 43 | "TENNESSEE" "state" "E.S. CENTRAL" 44 | "W.S. CENTRAL" "region" "NONE" 45 | "ARKANSAS" "state" "W.S. CENTRAL" 46 | "LOUISIANA" "state" "W.S. CENTRAL" 47 | "OKLAHOMA" "state" "W.S. CENTRAL" 48 | "TEXAS" "state" "W.S. 
CENTRAL" 49 | "MOUNTAIN" "region" "NONE" 50 | "ARIZONA" "state" "MOUNTAIN" 51 | "COLORADO" "state" "MOUNTAIN" 52 | "IDAHO" "state" "MOUNTAIN" 53 | "MONTANA" "state" "MOUNTAIN" 54 | "NEVADA" "state" "MOUNTAIN" 55 | "NEW MEXICO" "state" "MOUNTAIN" 56 | "UTAH" "state" "MOUNTAIN" 57 | "WYOMING" "state" "MOUNTAIN" 58 | "PACIFIC" "region" "NONE" 59 | "ALASKA" "state" "PACIFIC" 60 | "CALIFORNIA" "state" "PACIFIC" 61 | "HAWAII" "state" "PACIFIC" 62 | "OREGON" "state" "PACIFIC" 63 | "WASHINGTON" "state" "PACIFIC" 64 | "AMER. SAMOA" "state" "TERRITORIES" 65 | "C.N.M.I." "state" "TERRITORIES" 66 | "GUAM" "state" "TERRITORIES" 67 | "PUERTO RICO" "state" "TERRITORIES" 68 | "VIRGIN ISL." "state" "TERRITORIES" 69 | -------------------------------------------------------------------------------- /old_new_merge.R: -------------------------------------------------------------------------------- 1 | library(plyr) 2 | library(RCurl) 3 | library(dplyr) 4 | library(caTools) 5 | library(lubridate) 6 | 7 | # Read in data with disease names and corresponding urls. This data is created from the url_names.R file, which should be run first. 8 | urldat <- read.table("urldat.txt", header=T) 9 | dates <- read.table("dates.txt", header=T) 10 | 11 | # A function to help deal with NA values when calculating thresholds. NA's occur when we try to 12 | # calculate running standard deviations with only one data point, and cause an error in the cumsum function. 
# Args:
#   x:    numeric vector of disease occurrence counts to compute an alert
#         threshold for
#   days: integer window length (number of weeks) for the running statistics
#
# Running SDs are NA when computed from a single data point, and those NAs
# would break the downstream cumsum calls, so NA thresholds fall back to the
# observed count itself.
newthresh <- function(x, days) {
  thresh <- runmean(x, days, align = "right") + 2 * runsd(x, days, align = "right")
  na_idx <- is.na(thresh)
  thresh[na_idx] <- x[na_idx]
  thresh
}

# Convert MMWR week/year information to a calendar date, using the global
# `dates` lookup table (read from dates.txt). Column 5 + (year - 2014) of that
# table holds the week-ending dates for the given MMWR year.
#
# Args:
#   x: a named vector / data.frame row with entries "MMWR.Week" and
#      "MMWR.Year" (some sources use lowercase "MMWR.week"/"MMWR.year";
#      those are handled by the fallback branch)
getdate <- function(x) {
  if (!is.na(x['MMWR.Week'])) {
    wk <- as.numeric(x['MMWR.Week'])
    yr <- as.numeric(x['MMWR.Year'])
  } else {
    wk <- as.numeric(x['MMWR.week'])
    yr <- as.numeric(x['MMWR.year'])
  }
  filter(dates, MMWR.Week == wk)[5 + yr - 2014][[1]]
}

# This function takes each url and corresponding disease name and gets data from CDC. It then combines multiple years worth of data,
# calculates alert thresholds and cumulative sums and returns the columns of interest from the CDC data.
# Args:
#   url_data: the rows of the urldat.txt file which contain the urls for a given disease

url_func <- function(url_data){
  # Construct the CDC data-portal URLs (one per year of data, 2014-2016)
  curl <- url_data$url
  URL <- paste0("https://data.cdc.gov/api/views/", curl, "/rows.csv?accessType=DOWNLOAD")

  # Download one year's CSV; getURL() is used so SSL verification can be disabled
  fetch_year <- function(u) {
    read.csv(textConnection(getURL(u, ssl.verifypeer = FALSE)),
             strip.white = TRUE, stringsAsFactors = FALSE)
  }
  nndss14 <- fetch_year(URL[1])
  nndss15 <- fetch_year(URL[2])
  nndss16 <- fetch_year(URL[3])

  # Some diseases have a slightly different name for MMWR.Week and MMWR.Year,
  # so we standardize the names here (previously a 12-line copy-pasted ladder)
  std_names <- function(df) {
    if ("MMWRWeek"  %in% names(df)) df <- dplyr::rename(df, MMWR.Week = MMWRWeek)
    if ("MMWR.WEEK" %in% names(df)) df <- dplyr::rename(df, MMWR.Week = MMWR.WEEK)
    if ("MMWRYear"  %in% names(df)) df <- dplyr::rename(df, MMWR.Year = MMWRYear)
    if ("MMWR.YEAR" %in% names(df)) df <- dplyr::rename(df, MMWR.Year = MMWR.YEAR)
    df
  }
  nndss14 <- std_names(nndss14)
  nndss15 <- std_names(nndss15)
  nndss16 <- std_names(nndss16)

  # dname is the name of the column in the nndss file which contains weekly data
  # for the disease of interest
  dname <- paste0(url_data$data_name[1], "..Current.week")

  # special column name for P&I mortality data
  if (url_data$data_name[1] == "P&I MORT") dname <- "P.I..Total"

  # Select relevant columns from each year's data and rbind them together
  pick_cols <- function(df) {
    select(df, contains(dname), contains("MMWR"), contains("Reporting"), -contains("flag"))
  }
  nndss <- rbind(pick_cols(nndss14), pick_cols(nndss15), pick_cols(nndss16))

  # Set NA counts to 0 -- maybe not a great idea, but useful for calculating
  # thresholds and cumulative sums
  names(nndss)[which(dname == names(nndss))] <- "c"
  nndss$c <- as.numeric(nndss$c)
  nndss$c[is.na(nndss$c)] <- 0
  nndss$display_name <- url_data$display_name[1]

  # Create columns for 10 and 14 week thresholds and 10 and 14 week alerts, grouping by reporting area.
80 | nndss<- group_by(nndss, Reporting.Area) %>% do(mutate(., fourteenwk.thresh=newthresh(c,14), 81 | tenwk.thresh=newthresh(c,10), 82 | fourteenwk.alert=c>fourteenwk.thresh, 83 | tenwk.alert=c>tenwk.thresh 84 | )) 85 | 86 | 87 | # Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year 88 | nndss <- group_by(nndss, Reporting.Area, MMWR.Year) %>% do(mutate(., ycumulate=cumsum(c), 89 | ycumu10=ycumulate+(tenwk.thresh-c), 90 | ycumu14=ycumulate+(fourteenwk.thresh-c))) 91 | # Add date information for the MMWR week/year combination 92 | nndss$date<- apply(nndss, 1, getdate) 93 | 94 | #select and return relevant columns of data table 95 | nndss<- select(nndss, one_of("c","Reporting.Area", "MMWR.Year", "MMWR.Week","display_name","date"),contains("cumu"), contains("alert"),contains("thresh")) 96 | return(nndss) 97 | } 98 | 99 | # Run the url_func function for each different disease name in our urldat.txt data file. Use re-encoding to remove some 100 | # illegible characters 101 | output <- ddply(urldat, .(data_name), url_func) 102 | output$Reporting.Area <- as.character(output$Reporting.Area) 103 | Encoding(output$Reporting.Area) <- "latin1" 104 | output$Reporting.Area <- iconv(output$Reporting.Area, "latin1", "ASCII", sub="") 105 | 106 | # Write output as plotdat.csv 107 | beta <- read.table("oldplotdat.txt", header=T, sep=" ") 108 | beta$group <- "A" 109 | betcdc <- output 110 | betcdc <- dplyr::select(betcdc, data_name, display_name, MMWR.Week, Reporting.Area,c, MMWR.Year, 111 | fourteenwk.thresh,fourteenwk.alert, ycumulate, ycumu14) 112 | 113 | betcdc$group <- "B" 114 | betcdc <- rbind(beta,betcdc) 115 | 116 | datetrans <- read.table("week.csv", header=T, sep=",") 117 | 118 | betcdc$MMWR.Week[betcdc$MMWR.Week<10] <- paste(0, dplyr::filter(betcdc, MMWR.Week<10)$MMWR.Week, sep="") 119 | betcdc$tempdt <- as.integer(paste(betcdc$MMWR.Year, betcdc$MMWR.Week, sep="")) 120 | betcdc$rdate <- 
unlist(sapply(betcdc$tempdt, function(x){return(as.character(datetrans[which(datetrans$Week==x),]$Date_Week))}, simplify=TRUE)) 121 | 122 | 123 | 124 | betcdc$year <- year(as.Date(betcdc$rdate)) 125 | betcdc$week <- format(as.Date(betcdc$rdate),"%m/%d") 126 | 127 | write.table(betcdc, file="plotdat.txt", row.names=FALSE, col.names=TRUE) 128 | 129 | 130 | # Separate output file which contains all disease names called disease_names.csv 131 | write.table(unique(output$display_name), file="disease_names.txt", row.names=FALSE, col.names=TRUE) 132 | 133 | 134 | URL <- paste( "https://data.cdc.gov/api/views/",urldat$url[1], "/rows.csv?accessType=DOWNLOAD",sep="") 135 | names <- unique(read.csv(textConnection(getURL(URL,ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F)$Reporting.Area) 136 | # Separate output file which contains locations and location types (state, region, or country) called location_names.cdv 137 | regions <-c("NEW ENGLAND", "MID. ATLANTIC", "E.N. CENTRAL", "W.N. CENTRAL", "S. ATLANTIC", 138 | "E.S. CENTRAL", "W.S. CENTRAL", "MOUNTAIN", "PACIFIC", "TERRITORIES") 139 | loc_type <- rep("state", length(names)) 140 | loc_type[which(names%in%regions)] <- "region" 141 | loc_type[1] <- "country" 142 | 143 | # Also include, for state locations, which region the state falls under. Thankfully, the CDC data table is ordered so that it first lists a region, then 144 | # all the states in that region, then the next region, and so on. 
So, between each region name, all states will be in the same region 145 | region_num=0 146 | loc_reg <- rep("NONE", length(loc_type)) 147 | for(i in 1:62){ 148 | if(loc_type[i]=="region"){ 149 | region_num = region_num+1 150 | } 151 | if(loc_type[i]=="state"){ 152 | loc_reg[i]=regions[region_num] 153 | } 154 | } 155 | loc_reg[63:67] <- "TERRITORIES" 156 | all_locs<-data.frame(location=names,type=loc_type, region=loc_reg) 157 | write.table(all_locs, file="location_names.txt", row.names=FALSE, col.names=TRUE) 158 | 159 | ##now a location file for P&I mortality data 160 | URL <- URL <- paste( "https://data.cdc.gov/api/views/7esm-uptm/rows.csv?accessType=DOWNLOAD",sep="") 161 | pi_names <- unique(read.csv(textConnection(getURL(URL,ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F)$Reporting.Area) 162 | Encoding(pi_names) <- "latin1" 163 | pi_names <- iconv(pi_names, "latin1", "ASCII", sub="") 164 | pi_names[length(pi_names)]<-"Total" 165 | pi_names <- as.character(pi_names) 166 | loc_type <- rep("city", length(pi_names)) 167 | loc_type[which(tolower(pi_names)%in%tolower(regions))] <- "region" 168 | loc_type[length(pi_names)]<-"total" 169 | city_st<- sapply(strsplit(pi_names[loc_type=="city"], ",", fixed=T,), function(x){return(state.name[which(state.abb==gsub(" ", "",x[2],fixed=T))])}) 170 | city_st[[75]] <- NA 171 | city_st <- unlist(city_st) 172 | rcity_st <- rep(NA, length(loc_type)) 173 | rcity_st[which(loc_type=="city")]<-city_st 174 | # Also include, for state locations, which region the state falls under. Thankfully, the CDC data table is ordered so that it first lists a region, then 175 | # all the states in that region, then the next region, and so on. 
So, between each region name, all states will be in the same region 176 | region_num=0 177 | loc_reg <- rep("NONE", length(loc_type)) 178 | for(i in 1:length(pi_names)){ 179 | if(loc_type[i]=="region"){ 180 | region_num = region_num+1 181 | } 182 | if(loc_type[i]=="city"){ 183 | loc_reg[i]=regions[region_num] 184 | } 185 | } 186 | all_locs<-data.frame(location=pi_names,type=loc_type, region=loc_reg, state=rcity_st) 187 | write.table(all_locs, file="pi_names.txt", row.names=FALSE, col.names=TRUE) 188 | 189 | #separate code for infrequent diseases. 190 | URL <- c("https://data.cdc.gov/api/views/wcwi-x3uk/rows.csv?accessType=DOWNLOAD", 191 | "https://data.cdc.gov/api/views/pb4z-432k/rows.csv?accessType=DOWNLOAD", 192 | "https://data.cdc.gov/api/views/dwqk-w36f/rows.csv?accessType=DOWNLOAD" 193 | ) 194 | nndss14 <-read.csv(textConnection(getURL(URL[1],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 195 | nndss15 <- read.csv(textConnection(getURL(URL[2],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 196 | nndss16 <- read.csv(textConnection(getURL(URL[3],ssl.verifypeer=FALSE)),strip.white=T,stringsAsFactors=F) 197 | 198 | nndss <- rbind(select(nndss14, contains("Current.week"), contains("MMWR"), contains("Disease"), -contains("flag")), 199 | select(nndss15, contains("Current.week"),contains("MMWR"), contains("Disease"), -contains("flag")), 200 | select(nndss16, contains("Current.week"),contains("MMWR"), contains("Disease"), -contains("flag"))) 201 | 202 | 203 | #disease names are different bewteen years, try to clean some disease names up 204 | Encoding(nndss$Disease) <- "latin1" 205 | nndss$Disease <- iconv(nndss$Disease, "latin1", "ASCII", sub="") 206 | nndss$Disease <- gsub(":","",nndss$Disease) 207 | nndss$Disease <- gsub(",","",nndss$Disease) 208 | 209 | nndss$Disease <- gsub("*","",nndss$Disease, fixed=T) 210 | 211 | #remove all disease names which aren't present in both years 212 | nndss <- 
nndss[-which(nndss$Disease%in%names(which(table(nndss$Disease)<54))),]
# Work on a copy; `c` holds the numeric weekly count used everywhere below
d <- nndss
d$c <- d$Current.week
d$c <- as.numeric(d$c)

#calculate 14 week thresholds and alerts
# NA counts are set to 0 so newthresh()'s running mean/SD and cumsum() work
d <- d %>% mutate(c = ifelse(is.na(c),0,c))
d<- group_by(d, Disease) %>% do(mutate(., threshold=newthresh(c,14),
                                       alert=c>threshold,
                                       cumulate=cumsum(c),
                                       cumu14=cumulate+(threshold-c)))

# Create columns for cumulative sum along with cumulative threshold values, grouping both by reporting area and year
d <- group_by(d, Disease, MMWR.year) %>% do(mutate(., ycumulate=cumsum(c),
                                                   ycumu14=ycumulate+(threshold-c)))
#get dates
# getdate() maps each row's MMWR week/year to a calendar date via dates.txt
d$date <- apply(d, 1, getdate)

#rename some diseases
# Shorten level names for display. NOTE(review): these hard-coded indices
# assume a fixed alphabetical set of factor levels -- verify them whenever the
# upstream name-cleaning (gsub calls above) or the CDC feed changes.
d$Disease <- as.factor(d$Disease)
levels(d$Disease)[3] <- "Arbo,EEE"
levels(d$Disease)[2] <- "Arbo,CA serogroup"
levels(d$Disease)[4] <- "Arbo,Powassan"
levels(d$Disease)[5] <- "Arbo,St Louis"
levels(d$Disease)[6] <- "Arbo,WEE"
levels(d$Disease)[9] <- "Botulism other"
levels(d$Disease)[14] <- "Cyclosporiasis"
levels(d$Disease)[16] <- "H flu <5 non-b"
levels(d$Disease)[17] <- "H flu <5 b"
levels(d$Disease)[18] <- "H flu <5 unknown"
levels(d$Disease)[19] <- "Hansen Disease"
levels(d$Disease)[20] <- "HUS,postdiarrheal"
levels(d$Disease)[21] <- "HBV,perinatal"
levels(d$Disease)[22] <- "Influenza ped mort"
levels(d$Disease)[25] <- "Measles"
levels(d$Disease)[26] <- "Mening a,c,y,w-135"
levels(d$Disease)[27] <- "Mening other"
levels(d$Disease)[28] <- "Mening serogroup b"
levels(d$Disease)[29] <- "Mening unknown"
levels(d$Disease)[30] <- "Novel influenza A"
levels(d$Disease)[32] <- "Polio nonparalytic"
levels(d$Disease)[34] <- "Psittacosis"
levels(d$Disease)[37] <- "Q Fever, Total"
levels(d$Disease)[39] <- "SARS-CoV"
levels(d$Disease)[40] <- "Smallpox"
levels(d$Disease)[41] <-
"Strep toxic shock synd"
levels(d$Disease)[42] <- "Syphilis congenital <1yr"
# BUG FIX: the original assigned to level [42] twice in a row, immediately
# overwriting the "Syphilis congenital <1yr" rename above; the staphylococcal
# toxic-shock level is the next one alphabetically, [43].
levels(d$Disease)[43] <- "Toxic shock synd staph"
levels(d$Disease)[47] <- "Vanco Interm Staph A"
levels(d$Disease)[48] <- "Vanco Resist Staph A"

# Rows with no threshold yet cannot alert; record them explicitly as "N"
d$alert[is.na(d$alert)] <- "N"

write.table(d, file="infreq.txt", row.names=FALSE, col.names=TRUE)
write.table(unique(d$Disease), file="inf_dis.txt",row.names=FALSE, col.names=TRUE)
--------------------------------------------------------------------------------
/pi_names.txt:
--------------------------------------------------------------------------------
"location" "type" "region" "state"
"New England" "region" "NONE" NA
"Boston, MA" "city" "NEW ENGLAND" "Massachusetts"
"Bridgeport, CT" "city" "NEW ENGLAND" "Connecticut"
"Cambridge, MA" "city" "NEW ENGLAND" "Massachusetts"
"Fall River, MA" "city" "NEW ENGLAND" "Massachusetts"
"Hartford, CT" "city" "NEW ENGLAND" "Connecticut"
"Lowell, MA" "city" "NEW ENGLAND" "Massachusetts"
"Lynn, MA" "city" "NEW ENGLAND" "Massachusetts"
"New Bedford, MA" "city" "NEW ENGLAND" "Massachusetts"
"New Haven, CT" "city" "NEW ENGLAND" "Connecticut"
"Providence, RI" "city" "NEW ENGLAND" "Rhode Island"
"Somerville, MA" "city" "NEW ENGLAND" "Massachusetts"
"Springfield, MA" "city" "NEW ENGLAND" "Massachusetts"
"Waterbury, CT" "city" "NEW ENGLAND" "Connecticut"
"Worcester, MA" "city" "NEW ENGLAND" "Massachusetts"
"Mid. Atlantic" "region" "NONE" NA
"Albany, NY" "city" "MID. ATLANTIC" "New York"
"Allentown, PA" "city" "MID. ATLANTIC" "Pennsylvania"
"Buffalo, NY" "city" "MID. ATLANTIC" "New York"
"Camden, NJ" "city" "MID. ATLANTIC" "New Jersey"
"Elizabeth, NJ" "city" "MID. ATLANTIC" "New Jersey"
"Erie, PA" "city" "MID. ATLANTIC" "Pennsylvania"
"Jersey City, NJ" "city" "MID. ATLANTIC" "New Jersey"
"New York City, NY" "city" "MID.
ATLANTIC" "New York" 26 | "Newark, NJ" "city" "MID. ATLANTIC" "New Jersey" 27 | "Paterson, NJ" "city" "MID. ATLANTIC" "New Jersey" 28 | "Philadelphia, PA" "city" "MID. ATLANTIC" "Pennsylvania" 29 | "Pittsburgh, PA" "city" "MID. ATLANTIC" "Pennsylvania" 30 | "Reading, PA" "city" "MID. ATLANTIC" "Pennsylvania" 31 | "Rochester, NY" "city" "MID. ATLANTIC" "New York" 32 | "Schenectady, NY" "city" "MID. ATLANTIC" "New York" 33 | "Scranton, PA" "city" "MID. ATLANTIC" "Pennsylvania" 34 | "Syracuse, NY" "city" "MID. ATLANTIC" "New York" 35 | "Trenton, NJ" "city" "MID. ATLANTIC" "New Jersey" 36 | "Utica, NY" "city" "MID. ATLANTIC" "New York" 37 | "Yonkers, NY" "city" "MID. ATLANTIC" "New York" 38 | "E.N. Central" "region" "NONE" NA 39 | "Akron, OH" "city" "E.N. CENTRAL" "Ohio" 40 | "Canton, OH" "city" "E.N. CENTRAL" "Ohio" 41 | "Chicago, IL" "city" "E.N. CENTRAL" "Illinois" 42 | "Cincinnati, OH" "city" "E.N. CENTRAL" "Ohio" 43 | "Cleveland, OH" "city" "E.N. CENTRAL" "Ohio" 44 | "Columbus, OH" "city" "E.N. CENTRAL" "Ohio" 45 | "Dayton, OH" "city" "E.N. CENTRAL" "Ohio" 46 | "Detroit, MI" "city" "E.N. CENTRAL" "Michigan" 47 | "Evansville, IN" "city" "E.N. CENTRAL" "Indiana" 48 | "Fort Wayne, IN" "city" "E.N. CENTRAL" "Indiana" 49 | "Gary, IN" "city" "E.N. CENTRAL" "Indiana" 50 | "Grand Rapids, MI" "city" "E.N. CENTRAL" "Michigan" 51 | "Indianapolis, IN" "city" "E.N. CENTRAL" "Indiana" 52 | "Lansing, MI" "city" "E.N. CENTRAL" "Michigan" 53 | "Milwaukee, WI" "city" "E.N. CENTRAL" "Wisconsin" 54 | "Peoria, IL" "city" "E.N. CENTRAL" "Illinois" 55 | "Rockford, IL" "city" "E.N. CENTRAL" "Illinois" 56 | "South Bend, IN" "city" "E.N. CENTRAL" "Indiana" 57 | "Toledo, OH" "city" "E.N. CENTRAL" "Ohio" 58 | "Youngstown, OH" "city" "E.N. CENTRAL" "Ohio" 59 | "W.N. Central" "region" "NONE" NA 60 | "Des Moines, IA" "city" "W.N. CENTRAL" "Iowa" 61 | "Duluth, MN" "city" "W.N. CENTRAL" "Minnesota" 62 | "Kansas City, KS" "city" "W.N. CENTRAL" "Kansas" 63 | "Kansas City, MO" "city" "W.N. 
CENTRAL" "Missouri" 64 | "Lincoln, NC" "city" "W.N. CENTRAL" "North Carolina" 65 | "Minneapolis, MN" "city" "W.N. CENTRAL" "Minnesota" 66 | "Omaha, NE" "city" "W.N. CENTRAL" "Nebraska" 67 | "St. Louis, MO" "city" "W.N. CENTRAL" "Missouri" 68 | "St. Paul, MN" "city" "W.N. CENTRAL" "Minnesota" 69 | "Wichita, KS" "city" "W.N. CENTRAL" "Kansas" 70 | "S. Atlantic" "region" "NONE" NA 71 | "Atlanta, GA" "city" "S. ATLANTIC" "Georgia" 72 | "Baltimore, MD" "city" "S. ATLANTIC" "Maryland" 73 | "Charlotte, NC" "city" "S. ATLANTIC" "North Carolina" 74 | "Jacksonville, FL" "city" "S. ATLANTIC" "Florida" 75 | "Miami, FL" "city" "S. ATLANTIC" "Florida" 76 | "Norfolk, VA" "city" "S. ATLANTIC" "Virginia" 77 | "Richmond, VA" "city" "S. ATLANTIC" "Virginia" 78 | "Savannah, GA" "city" "S. ATLANTIC" "Georgia" 79 | "St. Petersburg, FL" "city" "S. ATLANTIC" "Florida" 80 | "Tampa, FL" "city" "S. ATLANTIC" "Florida" 81 | "Washington, D.C." "city" "S. ATLANTIC" NA 82 | "Wilmington, DE" "city" "S. ATLANTIC" "Delaware" 83 | "E.S. Central" "region" "NONE" NA 84 | "Birmingham, AL" "city" "E.S. CENTRAL" "Alabama" 85 | "Chattanooga, TN" "city" "E.S. CENTRAL" "Tennessee" 86 | "Knoxville, TN" "city" "E.S. CENTRAL" "Tennessee" 87 | "Lexington, KY" "city" "E.S. CENTRAL" "Kentucky" 88 | "Memphis, TN" "city" "E.S. CENTRAL" "Tennessee" 89 | "Mobile, AL" "city" "E.S. CENTRAL" "Alabama" 90 | "Montgomery, AL" "city" "E.S. CENTRAL" "Alabama" 91 | "Nashville, TN" "city" "E.S. CENTRAL" "Tennessee" 92 | "W.S. Central" "region" "NONE" NA 93 | "Austin, TX" "city" "W.S. CENTRAL" "Texas" 94 | "Baton Rouge, LA" "city" "W.S. CENTRAL" "Louisiana" 95 | "Corpus Christi, TX" "city" "W.S. CENTRAL" "Texas" 96 | "Dallas, TX" "city" "W.S. CENTRAL" "Texas" 97 | "El Paso, TX" "city" "W.S. CENTRAL" "Texas" 98 | "Fort Worth, TX" "city" "W.S. CENTRAL" "Texas" 99 | "Houston, TX" "city" "W.S. CENTRAL" "Texas" 100 | "Little Rock, AR" "city" "W.S. CENTRAL" "Arkansas" 101 | "New Orleans, LA" "city" "W.S. 
CENTRAL" "Louisiana" 102 | "San Antonio, TX" "city" "W.S. CENTRAL" "Texas" 103 | "Shreveport, LA" "city" "W.S. CENTRAL" "Louisiana" 104 | "Tulsa, OK" "city" "W.S. CENTRAL" "Oklahoma" 105 | "Mountain" "region" "NONE" NA 106 | "Albuquerque, NM" "city" "MOUNTAIN" "New Mexico" 107 | "Boise, ID" "city" "MOUNTAIN" "Idaho" 108 | "Colorado Springs, CO" "city" "MOUNTAIN" "Colorado" 109 | "Denver, CO" "city" "MOUNTAIN" "Colorado" 110 | "Las Vegas, NV" "city" "MOUNTAIN" "Nevada" 111 | "Ogden, UT" "city" "MOUNTAIN" "Utah" 112 | "Phoenix, AZ" "city" "MOUNTAIN" "Arizona" 113 | "Pueblo, CO" "city" "MOUNTAIN" "Colorado" 114 | "Salt Lake City, UT" "city" "MOUNTAIN" "Utah" 115 | "Tucson, AZ" "city" "MOUNTAIN" "Arizona" 116 | "Pacific" "region" "NONE" NA 117 | "Berkeley, CA" "city" "PACIFIC" "California" 118 | "Fresno, CA" "city" "PACIFIC" "California" 119 | "Glendale, CA" "city" "PACIFIC" "California" 120 | "Honolulu, HI" "city" "PACIFIC" "Hawaii" 121 | "Long Beach, CA" "city" "PACIFIC" "California" 122 | "Los Angeles, CA" "city" "PACIFIC" "California" 123 | "Pasadena, CA" "city" "PACIFIC" "California" 124 | "Portland, OR" "city" "PACIFIC" "Oregon" 125 | "Sacramento, CA" "city" "PACIFIC" "California" 126 | "San Diego, CA" "city" "PACIFIC" "California" 127 | "San Francisco, CA" "city" "PACIFIC" "California" 128 | "San Jose, CA" "city" "PACIFIC" "California" 129 | "Santa Cruz, CA" "city" "PACIFIC" "California" 130 | "Seattle, WA" "city" "PACIFIC" "Washington" 131 | "Spokane, WA" "city" "PACIFIC" "Washington" 132 | "Tacoma, WA" "city" "PACIFIC" "Washington" 133 | "Total" "total" "NONE" NA 134 | -------------------------------------------------------------------------------- /server.R: -------------------------------------------------------------------------------- 1 | library(shiny) 2 | library(ggplot2) 3 | library(dplyr) 4 | library(scales) 5 | library(grid) 6 | ## Sometimes as different options are chosen, Shiny tries to create a plot without first reading in new user input, 7 | ## 
which can cause ggplot to throw an error. For that reason, there's a lot of code like if(is.null(input$locty)) return() 8 | ## This code checks to see if the correct input has been read yet, and if not, it prevents ggplot from trying to plot anything. 9 | 10 | # Load in cdc data, P&I data, and infreq disease data along with location names. 11 | cdcdata <- read.table("plotdat.txt", header=T) 12 | # Have to convert some region names to all uppercase since they were recorded differently. May want 13 | # to move this into CDCScrape.R 14 | location_names <- read.table("location_names.txt", header=T, colClasses=c("character","character")) 15 | cdcdata$year <- as.factor(cdcdata$year) 16 | cdcdata$MMWR.Year <- as.factor(cdcdata$MMWR.Year) 17 | cdcdata$Reporting.Area[toupper(cdcdata$Reporting.Area)%in%location_names$region]<- toupper(cdcdata$Reporting.Area[toupper(cdcdata$Reporting.Area)%in%location_names$region]) 18 | cdcdata$rdate <- as.Date(cdcdata$rdate) 19 | cdcdata$week <- as.Date(cdcdata$week, format="%m/%d") 20 | 21 | pi_names <- read.table("pi_names.txt", header=T,colClasses=c("character","character")) 22 | pi_names$location[which(pi_names$type=="region")]<- toupper(pi_names$location[which(pi_names$type=="region")]) 23 | 24 | 25 | infreq <- read.table("infreq.txt", header=T) 26 | infreq$MMWR.year <- as.factor(infreq$MMWR.year) 27 | infreq$date <- as.Date(infreq$date) 28 | 29 | datetrans <- read.table("week.csv", header=T, sep=",") 30 | 31 | 32 | shinyServer(function(input, output, session) { 33 | 34 | # The first reactive element of the UI is a drop down menu which filters locations based on whether 35 | # the user has selected states, regions, states in regions, or country 36 | # if "all regions" is selected, no location choices are displayed 37 | output$location <- renderUI({ 38 | if(is.null(input$locty))return() 39 | switch(input$locty, 40 | "state" = return(selectInput('location_name', 'State Name',sort(filter(location_names, type=="state")$location))), 41 | 
"stregion" = return(selectInput('location_name', 'Region Name', sort(filter(location_names, type=="region")$location))), 42 | "region" = return(selectInput('location_name', 'Region Name', sort(filter(location_names, type=="region")$location))), 43 | "country" = return(selectInput('location_name', 'Country Name',sort(filter(location_names, type=="country")$location))) 44 | ) 45 | }) 46 | 47 | 48 | output$locationP <- renderUI({ 49 | if(is.null(input$loctyP))return() 50 | switch(input$loctyP, 51 | "city" = return(selectInput('location_nameP', 'City Name',sort(filter(pi_names, type=="city")$location))), 52 | "ctregion" = return(selectInput('location_nameP', 'Region Name', sort(filter(pi_names, type=="region")$location))), 53 | "regionP" = return(selectInput('location_nameP', 'Region Name', sort(filter(pi_names, type=="region")$location))), 54 | "stateP" = return(selectInput('location_nameP', 'State Name', sort(unique(filter(pi_names, type=="city")$state)))) 55 | ) 56 | }) 57 | 58 | # The second reactive element of the UI is a checkbox which forces the same y-axis scale for all plots. 59 | # This element only shows up if multiple plots are being shown. 60 | output$frees <- renderUI({ 61 | if(input$locty=="stregion"||input$locty=="aregion"){ 62 | return(checkboxInput('fixed_scales','Force same scale for y-axis', value=F)) 63 | } 64 | return() 65 | }) 66 | 67 | 68 | 69 | output$freesP <- renderUI({ 70 | if(!is.null(input$loctyP)){ 71 | if(input$loctyP=="ctregion"||input$loctyP=="aregionP"){ 72 | return(checkboxInput('fixed_scalesP','Force same scale for y-axis', value=F)) 73 | } 74 | } 75 | return() 76 | }) 77 | 78 | 79 | 80 | 81 | observe({ 82 | x <- input$disease_name 83 | miny <- min(filter(cdcdata, display_name==x)$rdate) 84 | updateDateRangeInput(session, "years", min = miny) 85 | }) 86 | 87 | 88 | # We select data to plot based on which location type and location was chosen. 
89 | # The reactive function filters the data to return only rows from cdc data which correspond to either the state, 90 | # region, or country selected. For some reason, need to put in extra error check for the "states within region" option to prevent ggplot error message 91 | selectedData <- reactive({ 92 | if(input$locty=="aregion") return(filter(cdcdata, display_name == input$disease_name, Reporting.Area %in% location_names$location[which(location_names$type=="region")], 93 | rdate >= input$years[1], rdate<=input$years[2] )) 94 | 95 | if(is.null(input$location_name))return() 96 | if(input$locty=="state"||input$locty=="region"||input$locty=="country") return(filter(cdcdata, display_name == input$disease_name, Reporting.Area == input$location_name, 97 | rdate >= input$years[1], rdate<=input$years[2] )) 98 | if(input$locty=="stregion"){ 99 | if(!(input$location_name %in% location_names$region)){return()} 100 | return(filter(cdcdata, display_name == input$disease_name, Reporting.Area %in% location_names$location[location_names$region==input$location_name], 101 | rdate >= input$years[1], rdate<=input$years[2] ))} 102 | }) 103 | 104 | 105 | selectedDataI <- reactive({ 106 | if(is.null(input$inf_name))return() 107 | return(filter(infreq, Disease==input$inf_name, date >= input$yearsInf[1], date <= input$yearsInf[2])) 108 | }) 109 | 110 | selectedDataP <- reactive({ 111 | if(input$loctyP=="aregionP") return(filter(cdcdata, display_name == "P&I MORT", Reporting.Area %in% pi_names$location[which(pi_names$type=="region")], 112 | rdate >= input$yearsPI[1], rdate <= input$yearsPI[2])) 113 | if(input$loctyP=="totalP") return(filter(cdcdata, display_name=="P&I MORT", Reporting.Area == "Total", 114 | rdate >= input$yearsPI[1], rdate <= input$yearsPI[2])) 115 | if(is.null(input$location_nameP))return() 116 | if(input$loctyP=="city"||input$loctyP=="regionP") return(filter(cdcdata, display_name == "P&I MORT", Reporting.Area == input$location_nameP, 117 | rdate >= input$yearsPI[1], 
rdate <= input$yearsPI[2])) 118 | if(input$loctyP=="stateP"){ 119 | if(!(input$location_nameP %in% pi_names$state)){return()} 120 | return(filter(cdcdata, display_name == "P&I MORT", Reporting.Area %in% pi_names$location[pi_names$state==input$location_nameP], 121 | rdate >= input$yearsPI[1], rdate <= input$yearsPI[2]))} 122 | if(input$loctyP=="ctregion"){ 123 | if(!(input$location_nameP %in% pi_names$region)){return()} 124 | return(filter(cdcdata, display_name == "P&I MORT", Reporting.Area %in% pi_names$location[pi_names$region==input$location_nameP], 125 | rdate >= input$yearsPI[1], rdate <= input$yearsPI[2]))} 126 | }) 127 | 128 | 129 | 130 | # Plot data - either a single plot for one location, or faceted plots for all locations of a single type 131 | output$plot1 <- renderPlot({ 132 | if(is.null(input$locty)||is.null(selectedData()))return() 133 | scaletype = "fixed" 134 | 135 | # Depending on whether the "Cumulative" checkbox is checked, set plot aesthetics to either weekly or cumulative counts 136 | switch(input$plotty, 137 | "week" = {aesthetics1 = aes(x=rdate, y=c, group=group) 138 | aesthetics2 = aes(x=rdate, y=fourteenwk.thresh, group=group) 139 | xlabel = "Date"}, 140 | "weeky" = {aesthetics1 = aes(x=MMWR.Week, y=c, group=MMWR.Year, colour=MMWR.Year) 141 | aesthetics2 = aes(x=MMWR.Week, y=fourteenwk.thresh,colour=MMWR.Year) 142 | xlabel = "MMWR Week"} , 143 | "cumuy" = {aesthetics1 = aes(x=MMWR.Week, y=ycumulate, group=MMWR.Year, colour=MMWR.Year) 144 | aesthetics2 = aes(x=MMWR.Week, y=ycumu14,colour=MMWR.Year) 145 | xlabel = "MMWR Week"} 146 | ) 147 | 148 | # Create the main ggplot 149 | p <- ggplot(selectedData(), aesthetics1)+geom_line(stat="identity",position="identity",size=1)+ 150 | ylab("Number Reported")+scale_color_brewer(palette="Set2",name="Weekly case counts")+ 151 | ggtitle(paste("MMWR",input$disease_name, "Reports"))+xlab(xlabel) + geom_point() 152 | 153 | #if(input$plotty=="weeky"||input$plotty=="cumuy") p <- p + scale_x_date(breaks="3 
months",limits=c(as.Date("1/1", format="%m/%d"),as.Date("12/31", format="%m/%d")), 154 | # labels=date_format("%b")) 155 | 156 | 157 | 158 | # If the alert threshold box was checked, include a line on the plots. Otherwise, plot with no line. 159 | if(input$alert_line){ 160 | p <- p + geom_point(data=subset(selectedData(),fourteenwk.alert == T),colour='RED') 161 | if(input$plotty=="week") p <- p+geom_line( mapping=aesthetics2, linetype="dashed", colour='RED') 162 | if(input$plotty=="weeky"||input$plotty=="cumuy") p <- p+geom_line( mapping=aesthetics2, linetype="dashed") 163 | } 164 | 165 | if(input$locty=="state"||input$locty=="region"||input$locty=="country") return(p) 166 | 167 | if(is.null(input$fixed_scales)){return()} 168 | if(input$fixed_scales==F) scaletype="free" 169 | 170 | return(p + facet_wrap(~ Reporting.Area, scales=scaletype)+theme(panel.margin = unit(1, "lines"))) 171 | }) 172 | 173 | 174 | output$plot2 <- renderPlot({ 175 | if(is.null(selectedDataI()))return() 176 | scaletype = "fixed" 177 | 178 | # Depending on whether the "Cumulative" checkbox is checked, set plot aesthetics to either weekly or cumulative counts 179 | switch(input$plottyI, 180 | "week" = {aesthetics1 = aes(x=date, y=c) 181 | aesthetics2 = aes(x=date, y=threshold) 182 | xlabel = "Date"}, 183 | "weeky" = {aesthetics1 = aes(x=MMWR.week, y=c, group=MMWR.year, colour=MMWR.year) 184 | aesthetics2 = aes(x=MMWR.week, y=threshold,colour=MMWR.year) 185 | xlabel = "MMWR Week"} , 186 | "cumuy" = {aesthetics1 = aes(x=MMWR.week, y=ycumulate, group=MMWR.year, colour=MMWR.year) 187 | aesthetics2 = aes(x=MMWR.week, y=ycumu14,colour=MMWR.year) 188 | xlabel = "MMWR Week"} 189 | ) 190 | 191 | # Create the main ggplot 192 | p <- ggplot(selectedDataI(), aesthetics1)+geom_line(stat="identity",position="identity",size=1)+ 193 | ylab("Number Reported")+scale_color_brewer(palette="Set2",name="Weekly case counts")+ 194 | ggtitle(paste("MMWR",input$inf_name, "Reports")) + geom_point() + xlab(xlabel) 195 | 196 
| # If the alert threshold box was checked, include a line on the plots. Otherwise, plot with no line. 197 | if(input$alert_lineI){ 198 | p <- p+ geom_point(data=subset(selectedDataI(),alert == T),colour='RED') 199 | if(input$plottyI=="week") p <- p+geom_line( mapping=aesthetics2, linetype="dashed", colour='RED') 200 | if(input$plottyI=="weeky"||input$plottyI=="cumuy") p <- p+geom_line( mapping=aesthetics2, linetype="dashed") 201 | } 202 | 203 | return(p) 204 | }) 205 | 206 | output$plot3 <- renderPlot({ 207 | if(any(is.null(input$loctyP),is.null(selectedDataP()),is.null(input$plottyP), 208 | is.null(input$alert_lineP),is.null(input$fixed_scalesP))){ 209 | return() 210 | } 211 | scaletype = "fixed" 212 | 213 | # Depending on whether the "Cumulative" checkbox is checked, set plot aesthetics to either weekly or cumulative counts 214 | switch(input$plottyP, 215 | "week" = {aesthetics1 = aes(x=rdate, y=c) 216 | aesthetics2 = aes(x=rdate, y=fourteenwk.thresh) 217 | xlabel = "Date"}, 218 | "weeky" = {aesthetics1 = aes(x=MMWR.Week, y=c, group=MMWR.Year, colour=MMWR.Year) 219 | aesthetics2 = aes(x=MMWR.Week, y=fourteenwk.thresh,colour=MMWR.Year) 220 | xlabel = "MMWR Week"} , 221 | "cumuy" = {aesthetics1 = aes(x=MMWR.Week, y=ycumulate, group=MMWR.Year, colour=MMWR.Year) 222 | aesthetics2 = aes(x=MMWR.Week, y=ycumu14,colour=MMWR.Year) 223 | xlabel = "MMWR Week"} 224 | ) 225 | 226 | # Create the main ggplot 227 | p <- ggplot(selectedDataP(), aesthetics1)+geom_line(stat="identity",position="identity",size=1)+ 228 | ylab("Number Reported")+scale_color_brewer(palette="Set2",name="Weekly case counts")+ 229 | ggtitle(paste("MMWR P&I Mortality Reports")) + geom_point() + xlab(xlabel) 230 | 231 | 232 | # If the alert threshold box was checked, include a line on the plots. Otherwise, plot with no line. 
233 | if(input$alert_lineP){ 234 | p <- p+ geom_point(data=subset(selectedDataP(),fourteenwk.alert == T),colour='RED') 235 | if(input$plottyP=="week"||input$plottyP=="cumu") p <- p+geom_line( mapping=aesthetics2, linetype="dashed", colour='RED') 236 | if(input$plottyP=="weeky"||input$plottyP=="cumuy") p <- p+geom_line( mapping=aesthetics2, linetype="dashed") 237 | } 238 | 239 | if(input$loctyP=="state"||input$loctyP=="region"||input$loctyP=="country") return(p) 240 | 241 | if(is.null(input$fixed_scalesP)){return()} 242 | if(input$fixed_scalesP==F) scaletype="free" 243 | 244 | 245 | 246 | return(p + facet_wrap(~ Reporting.Area, scales=scaletype)) 247 | 248 | }) 249 | 250 | }) 251 | 252 | 253 | 254 | 255 | 256 | ##still to do: 257 | ##infreq and p&i mort 258 | #recode automatic update - create file just for old data, then rescrape new data, reformat, and attach each week. -------------------------------------------------------------------------------- /ui.R: -------------------------------------------------------------------------------- 1 | #ideas: navbar so we can have a separate page for uncommon disease plots and for p&i data 2 | library(shiny) 3 | library(dplyr) 4 | #read in disease and location name data 5 | disease_names <- read.table("disease_names.txt", header=T) 6 | disease_names <- droplevels(filter(disease_names, x!="P&I MORT")) 7 | infrequent <- read.table("inf_dis.txt", header=T) 8 | 9 | 10 | shinyUI(navbarPage("CDC Data Visualization", 11 | tabPanel("Disease Plots", 12 | 13 | # Application title 14 | titlePanel("CDC Weekly Case Count"), 15 | 16 | #The shiny app is made up of two columns: the first column houses all of the user interface, 17 | #including disease name selection, location type, location name, and plot options. 18 | column(4, wellPanel( 19 | # A drop down menu to choose the disease name of interest. 20 | # Reads in disease names from disease_names.txt. Can choose which name to default to. 
21 | selectInput('disease_name', 'Disease Name', as.character(levels(disease_names$x)), selected="Salmonellosis"), 22 | 23 | # A drop down menu to choose location. The menu will be populated based on which location type was chosen. 24 | # The checkbox is defined in the server.R file 25 | dateRangeInput('years','Choose date range', start= "2015-01-01", end=Sys.Date(), 26 | min = "2008-01-01", max=Sys.Date() ), 27 | 28 | uiOutput("location"), 29 | 30 | 31 | #A line break to make the interface clearer 32 | br(), 33 | 34 | # A row with two columns: one to choose the location type, and one to choose a plot type. 35 | fluidRow( 36 | column(7, radioButtons("locty", "Location Type", 37 | c("State" = "state", 38 | "Single region" = "region", 39 | "All states within a region"="stregion", 40 | "All regions"="aregion", 41 | "Country" = "country"), selected="aregion") 42 | ), 43 | column(5, radioButtons("plotty", "Plot Type", 44 | c("Weekly data" = "week", 45 | "Weekly data by year" = "weeky", 46 | "Cumulative data by year" = "cumuy"), selected="week")) 47 | ), 48 | 49 | # A row with some plot options. uiOutput("frees") creates a checkbox for 50 | # whether the y-axis scale should be the same for all plots. 51 | # This checkbox only appears for certain location type selections, and is defined in the server.R file. 52 | fluidRow( 53 | h5(strong('Plot Options')), 54 | checkboxInput('alert_line', 'Include alert thresholds (experimental)'), 55 | uiOutput("frees") 56 | )) 57 | ), 58 | 59 | 60 | 61 | # The second column houses the plot(s) of the data that was selected. These plots are defined in the server.R file. 62 | column(8, plotOutput('plot1')) 63 | ), 64 | 65 | tabPanel("Inf. Reported Diseases", 66 | titlePanel("CDC Weekly Case Count for Infrequently Reported Diseases"), 67 | column(4, wellPanel( 68 | # A drop down menu to choose the disease name of interest. 69 | # Reads in disease names from disease_names.txt. Can choose which name to default to. 
70 | selectInput('inf_name', 'Disease Name', as.character(levels(infrequent$x)), selected="Typhoid fever"), 71 | dateRangeInput('yearsInf','Choose date range', start= "2015-01-01", end=Sys.Date(), 72 | min = "2014-01-01", max=Sys.Date() ), 73 | radioButtons("plottyI", "Plot Type", 74 | c("Weekly data" = "week", 75 | "Weekly data by year" = "weeky", 76 | "Cumulative data by year" = "cumuy"), selected="week"), 77 | 78 | #A line break to make the interface clearer 79 | br(), 80 | fluidRow( 81 | h5(strong('Plot Options')), 82 | checkboxInput('alert_lineI', 'Include alert thresholds (experimental)') 83 | )) 84 | ), 85 | 86 | column(8, plotOutput('plot2')) 87 | ), 88 | 89 | tabPanel("CDC Pneumonia and Influenza Mortality", 90 | 91 | titlePanel("CDC Weekly Pneumonia and Influenza Mortality"), 92 | column(4, wellPanel( 93 | # A row with two columns: one to choose the location type, and one to choose from a few display options. 94 | # uiOutput("frees") creates a checkbox for whether the y-axis scale should be the same for all plots 95 | # This checkbox only appears for certain location type selections, and is defined in the server.R file. 
96 | uiOutput("locationP"), 97 | 98 | fluidRow( 99 | dateRangeInput('yearsPI','Choose date range', start= "2015-01-01", end=Sys.Date(), 100 | min = "2008-01-01", max=Sys.Date() ), 101 | column(7,radioButtons("loctyP", "Location Type", 102 | c("City" = "city", 103 | "Single region" = "regionP", 104 | "All cities within a state" = "stateP", 105 | "All cities within a region"="ctregion", 106 | "All regions"="aregionP", 107 | "Total" = "totalP"), selected="aregionP")), 108 | column(5, radioButtons("plottyP", "Plot Type", 109 | c("Weekly data" = "week", 110 | "Weekly data by year" = "weeky", 111 | "Cumulative data by year" = "cumuy"), selected="week")) 112 | ), 113 | 114 | fluidRow( 115 | h5(strong('Plot Options')), 116 | checkboxInput('alert_lineP', 'Include alert thresholds (experimental)'), 117 | uiOutput("freesP") 118 | )) 119 | ), 120 | 121 | column(8, plotOutput('plot3')) 122 | ), 123 | 124 | tabPanel("More Information", # Information about data collection. 125 | "Data are updated weekly on Thursday at 20:00 CT.", 126 | br(), 127 | br(), 128 | "Please visit", 129 | a("this site", href="http://wwwn.cdc.gov/nndss/document/ProvisionalNationaNotifiableDiseasesSurveillanceData20100927.pdf"), 130 | "for more information on how the data were collected. 
All data are provisional.", 131 | br(), 132 | br(), 133 | a("See the code", href="https://github.com/NLMichaud/WeeklyCDCPlot"), 134 | br(), 135 | br(), 136 | "Any questions or comments can be sent to", 137 | br(), 138 | "Aaron Kite-Powell: " , 139 | a("akitepowell@gmail.com", href="mailto:akitepowell@gmail.com"), 140 | br(), 141 | "Nick Michaud: ", 142 | a("michaud@iastate.edu", href="mailto:michaud@iastate.edu")) 143 | 144 | ) 145 | ) -------------------------------------------------------------------------------- /url_names.R: -------------------------------------------------------------------------------- 1 | # First column of matrix is the display name for each disease - to be used in plots 2 | # Second column is name used by CDC in downloaded tables, so that disease data can be accessed by appending ..Current.week to the end 3 | # Third column is the specific url where the disease data can be located. 4 | # Fourth column is the year of the data in the url. 5 | # To include additional diseases, add a row to the matrix.
6 | 7 | all <- matrix(c( 8 | c("Cryptosporidiosis","Cryptosporidiosis", "b36e-ru3r", "2014"), 9 | c("Cryptosporidiosis","Cryptosporidiosis", "9n3x-apcd", "2015"), 10 | c("Cryptosporidiosis","Cryptosporidiosis", "kikd-77zw", "2016"), 11 | c("Salmonellosis", "Salmonellosis", "52cr-rw4k", "2014"), 12 | c("Salmonellosis", "Salmonellosis", "d6kj-devz", "2015"), 13 | c("Salmonellosis", "Salmonellosis", "4qb4-rsd8", "2016"), 14 | c("Shigellosis","Shigellosis", "52cr-rw4k", "2014"), 15 | c("Shigellosis","Shigellosis", "n3wf-wtep", "2015"), 16 | c("Shigellosis","Shigellosis", "xv7k-8e7s", "2016"), 17 | c("Pertussis","Pertussis", "8rkx-vimh", "2014"), 18 | c("Pertussis","Pertussis", "d69q-iyrb", "2015"), 19 | c("Pertussis","Pertussis", "bfe6-2gyq", "2016"), 20 | c("Malaria","Malaria", "y6uv-t34t", "2014"), 21 | c("Malaria","Malaria", "7pb7-w9us", "2015"), 22 | c("Malaria","Malaria", "93k9-hy54", "2016"), 23 | c("Legionellosis","Legionellosis", "23gt-ssfe", "2014"), 24 | c("Legionellosis","Legionellosis", "ydsy-yh5w", "2015"), 25 | c("Legionellosis","Legionellosis", "yqwx-bvu7", "2016"), 26 | c("Hepatitis A", "Hepatitis..viral..acute...type.A","rg4j-6mcc", "2014"), 27 | c("Hepatitis A", "Hepatitis..viral..acute...type.A","65xe-6neq", "2015"), 28 | c("Hepatitis A", "Hepatitis..viral..acute...type.A","7vnz-2mjz", "2016"), 29 | c("Hepatitis B, Acute", "Hepatitis..viral..acute...type.B","rg4j-6mcc","2014"), 30 | c("Hepatitis B, Acute", "Hepatitis..viral..acute...type.B","65xe-6neq","2015"), 31 | c("Hepatitis B, Acute", "Hepatitis..viral..acute...type.B","7vnz-2mjz","2016"), 32 | c("Hepatitis C, Acute", "Hepatitis..viral..acute...type.C","rg4j-6mcc","2014"), 33 | c("Hepatitis C, Acute", "Hepatitis..viral..acute...type.C","65xe-6neq","2015"), 34 | c("Hepatitis C, Acute", "Hepatitis..viral..acute...type.C","7vnz-2mjz","2016"), 35 | c("Giardiasis", "Giardiasis", "9ix3-ryt6","2014"), 36 | c("Giardiasis", "Giardiasis", "mpdg-hf57","2015"), 37 | c("Giardiasis", "Giardiasis", 
"afja-b25e","2016"), 38 | c("Meningococcal Disease Invasive (all serogroups)", "Meningococcal.disease..invasive...All.serogroups", "y6uv-t34t","2014"), 39 | c("Meningococcal Disease Invasive (all serogroups)", "Meningococcal.disease..invasive...All.serogroups", "7pb7-w9us","2015"), 40 | c("Meningococcal Disease Invasive (all serogroups)", "Meningococcal.disease..invasive...All.serogroups", "93k9-hy54","2016"), 41 | c("Mumps", "Mumps", "8rkx-vimh","2014"), 42 | c("Mumps", "Mumps", "d69q-iyrb","2015"), 43 | c("Mumps", "Mumps", "bfe6-2gyq","2016"), 44 | #c("Pneumonia and Influenza Mortality Reports by City/Region, 2014", "P.I..Total","qpap-3u8w"), 45 | #leave out pneumonia for now, format is too different 46 | c("Shiga toxin-producing E. coli (STEC)", "Shiga.toxin.producing.E..coli..STEC..", "52cr-rw4k","2014"), 47 | c("Shiga toxin-producing E. coli (STEC)", "Shiga.toxin.producing.E..coli..STEC..", "n3wf-wtep","2015"), 48 | c("Shiga toxin-producing E. coli (STEC)", "Shiga.toxin.producing.E..coli..STEC..", "xv7k-8e7s","2016"), 49 | c("P&I MORT", "P&I MORT", "qpap-3u8w", "2014"), 50 | c("P&I MORT", "P&I MORT", "7esm-uptm", "2015"), 51 | c("P&I MORT", "P&I MORT", "rpjd-ejph", "2016") 52 | ) 53 | ,ncol=4, byrow=T) 54 | 55 | #Name matrix columns and write to csv file 56 | URL_NAMES <- data.frame(display_name=all[,1],data_name=all[,2],url=all[,3],year=all[,4]) 57 | write.table(URL_NAMES, file="urldat.txt", row.names=FALSE, col.names=TRUE) 58 | 59 | -------------------------------------------------------------------------------- /urldat.txt: -------------------------------------------------------------------------------- 1 | "display_name" "data_name" "url" "year" 2 | "Cryptosporidiosis" "Cryptosporidiosis" "b36e-ru3r" "2014" 3 | "Cryptosporidiosis" "Cryptosporidiosis" "9n3x-apcd" "2015" 4 | "Cryptosporidiosis" "Cryptosporidiosis" "kikd-77zw" "2016" 5 | "Salmonellosis" "Salmonellosis" "52cr-rw4k" "2014" 6 | "Salmonellosis" "Salmonellosis" "d6kj-devz" "2015" 7 | 
"Salmonellosis" "Salmonellosis" "4qb4-rsd8" "2016" 8 | "Shigellosis" "Shigellosis" "52cr-rw4k" "2014" 9 | "Shigellosis" "Shigellosis" "n3wf-wtep" "2015" 10 | "Shigellosis" "Shigellosis" "xv7k-8e7s" "2016" 11 | "Pertussis" "Pertussis" "8rkx-vimh" "2014" 12 | "Pertussis" "Pertussis" "d69q-iyrb" "2015" 13 | "Pertussis" "Pertussis" "bfe6-2gyq" "2016" 14 | "Malaria" "Malaria" "y6uv-t34t" "2014" 15 | "Malaria" "Malaria" "7pb7-w9us" "2015" 16 | "Malaria" "Malaria" "93k9-hy54" "2016" 17 | "Legionellosis" "Legionellosis" "23gt-ssfe" "2014" 18 | "Legionellosis" "Legionellosis" "ydsy-yh5w" "2015" 19 | "Legionellosis" "Legionellosis" "yqwx-bvu7" "2016" 20 | "Hepatitis A" "Hepatitis..viral..acute...type.A" "rg4j-6mcc" "2014" 21 | "Hepatitis A" "Hepatitis..viral..acute...type.A" "65xe-6neq" "2015" 22 | "Hepatitis A" "Hepatitis..viral..acute...type.A" "7vnz-2mjz" "2016" 23 | "Hepatitis B, Acute" "Hepatitis..viral..acute...type.B" "rg4j-6mcc" "2014" 24 | "Hepatitis B, Acute" "Hepatitis..viral..acute...type.B" "65xe-6neq" "2015" 25 | "Hepatitis B, Acute" "Hepatitis..viral..acute...type.B" "7vnz-2mjz" "2016" 26 | "Hepatitis C, Acute" "Hepatitis..viral..acute...type.C" "rg4j-6mcc" "2014" 27 | "Hepatitis C, Acute" "Hepatitis..viral..acute...type.C" "65xe-6neq" "2015" 28 | "Hepatitis C, Acute" "Hepatitis..viral..acute...type.C" "7vnz-2mjz" "2016" 29 | "Giardiasis" "Giardiasis" "9ix3-ryt6" "2014" 30 | "Giardiasis" "Giardiasis" "mpdg-hf57" "2015" 31 | "Giardiasis" "Giardiasis" "afja-b25e" "2016" 32 | "Meningococcal Disease Invasive (all serogroups)" "Meningococcal.disease..invasive...All.serogroups" "y6uv-t34t" "2014" 33 | "Meningococcal Disease Invasive (all serogroups)" "Meningococcal.disease..invasive...All.serogroups" "7pb7-w9us" "2015" 34 | "Meningococcal Disease Invasive (all serogroups)" "Meningococcal.disease..invasive...All.serogroups" "93k9-hy54" "2016" 35 | "Mumps" "Mumps" "8rkx-vimh" "2014" 36 | "Mumps" "Mumps" "d69q-iyrb" "2015" 37 | "Mumps" "Mumps" "bfe6-2gyq" "2016" 38 | 
"Shiga toxin-producing E. coli (STEC)" "Shiga.toxin.producing.E..coli..STEC.." "52cr-rw4k" "2014" 39 | "Shiga toxin-producing E. coli (STEC)" "Shiga.toxin.producing.E..coli..STEC.." "n3wf-wtep" "2015" 40 | "Shiga toxin-producing E. coli (STEC)" "Shiga.toxin.producing.E..coli..STEC.." "xv7k-8e7s" "2016" 41 | "P&I MORT" "P&I MORT" "qpap-3u8w" "2014" 42 | "P&I MORT" "P&I MORT" "7esm-uptm" "2015" 43 | "P&I MORT" "P&I MORT" "rpjd-ejph" "2016" 44 | -------------------------------------------------------------------------------- /week.csv: -------------------------------------------------------------------------------- 1 | "","X","Week","Date_Week" 2 | "1",1,200601,2006-01-07 3 | "2",2,200602,2006-01-14 4 | "3",3,200603,2006-01-21 5 | "4",4,200604,2006-01-28 6 | "5",5,200605,2006-02-04 7 | "6",6,200606,2006-02-11 8 | "7",7,200607,2006-02-18 9 | "8",8,200608,2006-02-25 10 | "9",9,200609,2006-03-04 11 | "10",10,200610,2006-03-11 12 | "11",11,200611,2006-03-18 13 | "12",12,200612,2006-03-25 14 | "13",13,200613,2006-04-01 15 | "14",14,200614,2006-04-08 16 | "15",15,200615,2006-04-15 17 | "16",16,200616,2006-04-22 18 | "17",17,200617,2006-04-29 19 | "18",18,200618,2006-05-06 20 | "19",19,200619,2006-05-13 21 | "20",20,200620,2006-05-20 22 | "21",21,200621,2006-05-27 23 | "22",22,200622,2006-06-03 24 | "23",23,200623,2006-06-10 25 | "24",24,200624,2006-06-17 26 | "25",25,200625,2006-06-24 27 | "26",26,200626,2006-07-01 28 | "27",27,200627,2006-07-08 29 | "28",28,200628,2006-07-15 30 | "29",29,200629,2006-07-22 31 | "30",30,200630,2006-07-29 32 | "31",31,200631,2006-08-05 33 | "32",32,200632,2006-08-12 34 | "33",33,200633,2006-08-19 35 | "34",34,200634,2006-08-26 36 | "35",35,200635,2006-09-02 37 | "36",36,200636,2006-09-09 38 | "37",37,200637,2006-09-16 39 | "38",38,200638,2006-09-23 40 | "39",39,200639,2006-09-30 41 | "40",40,200640,2006-10-07 42 | "41",41,200641,2006-10-14 43 | "42",42,200642,2006-10-21 44 | "43",43,200643,2006-10-28 45 | "44",44,200644,2006-11-04 46 | 
"45",45,200645,2006-11-11 47 | "46",46,200646,2006-11-18 48 | "47",47,200647,2006-11-25 49 | "48",48,200648,2006-12-02 50 | "49",49,200649,2006-12-09 51 | "50",50,200650,2006-12-16 52 | "51",51,200651,2006-12-23 53 | "52",52,200652,2006-12-30 54 | "53",53,200701,2007-01-06 55 | "54",54,200702,2007-01-13 56 | "55",55,200703,2007-01-20 57 | "56",56,200704,2007-01-27 58 | "57",57,200705,2007-02-03 59 | "58",58,200706,2007-02-10 60 | "59",59,200707,2007-02-17 61 | "60",60,200708,2007-02-24 62 | "61",61,200709,2007-03-03 63 | "62",62,200710,2007-03-10 64 | "63",63,200711,2007-03-17 65 | "64",64,200712,2007-03-24 66 | "65",65,200713,2007-03-31 67 | "66",66,200714,2007-04-07 68 | "67",67,200715,2007-04-14 69 | "68",68,200716,2007-04-21 70 | "69",69,200717,2007-04-28 71 | "70",70,200718,2007-05-05 72 | "71",71,200719,2007-05-12 73 | "72",72,200720,2007-05-19 74 | "73",73,200721,2007-05-26 75 | "74",74,200722,2007-06-02 76 | "75",75,200723,2007-06-09 77 | "76",76,200724,2007-06-16 78 | "77",77,200725,2007-06-23 79 | "78",78,200726,2007-06-30 80 | "79",79,200727,2007-07-07 81 | "80",80,200728,2007-07-14 82 | "81",81,200729,2007-07-21 83 | "82",82,200730,2007-07-28 84 | "83",83,200731,2007-08-04 85 | "84",84,200732,2007-08-11 86 | "85",85,200733,2007-08-18 87 | "86",86,200734,2007-08-25 88 | "87",87,200735,2007-09-01 89 | "88",88,200736,2007-09-08 90 | "89",89,200737,2007-09-15 91 | "90",90,200738,2007-09-22 92 | "91",91,200739,2007-09-29 93 | "92",92,200740,2007-10-06 94 | "93",93,200741,2007-10-13 95 | "94",94,200742,2007-10-20 96 | "95",95,200743,2007-10-27 97 | "96",96,200744,2007-11-03 98 | "97",97,200745,2007-11-10 99 | "98",98,200746,2007-11-17 100 | "99",99,200747,2007-11-24 101 | "100",100,200748,2007-12-01 102 | "101",101,200749,2007-12-08 103 | "102",102,200750,2007-12-15 104 | "103",103,200751,2007-12-22 105 | "104",104,200752,2007-12-29 106 | "105",105,200801,2008-01-05 107 | "106",106,200802,2008-01-12 108 | "107",107,200803,2008-01-19 109 | 
"108",108,200804,2008-01-26 110 | "109",109,200805,2008-02-02 111 | "110",110,200806,2008-02-09 112 | "111",111,200807,2008-02-16 113 | "112",112,200808,2008-02-23 114 | "113",113,200809,2008-03-01 115 | "114",114,200810,2008-03-08 116 | "115",115,200811,2008-03-15 117 | "116",116,200812,2008-03-22 118 | "117",117,200813,2008-03-29 119 | "118",118,200814,2008-04-05 120 | "119",119,200815,2008-04-12 121 | "120",120,200816,2008-04-19 122 | "121",121,200817,2008-04-26 123 | "122",122,200818,2008-05-03 124 | "123",123,200819,2008-05-10 125 | "124",124,200820,2008-05-17 126 | "125",125,200821,2008-05-24 127 | "126",126,200822,2008-05-31 128 | "127",127,200823,2008-06-07 129 | "128",128,200824,2008-06-14 130 | "129",129,200825,2008-06-21 131 | "130",130,200826,2008-06-28 132 | "131",131,200827,2008-07-05 133 | "132",132,200828,2008-07-12 134 | "133",133,200829,2008-07-19 135 | "134",134,200830,2008-07-26 136 | "135",135,200831,2008-08-02 137 | "136",136,200832,2008-08-09 138 | "137",137,200833,2008-08-16 139 | "138",138,200834,2008-08-23 140 | "139",139,200835,2008-08-30 141 | "140",140,200836,2008-09-06 142 | "141",141,200837,2008-09-13 143 | "142",142,200838,2008-09-20 144 | "143",143,200839,2008-09-27 145 | "144",144,200840,2008-10-04 146 | "145",145,200841,2008-10-11 147 | "146",146,200842,2008-10-18 148 | "147",147,200843,2008-10-25 149 | "148",148,200844,2008-11-01 150 | "149",149,200845,2008-11-08 151 | "150",150,200846,2008-11-15 152 | "151",151,200847,2008-11-22 153 | "152",152,200848,2008-11-29 154 | "153",153,200849,2008-12-06 155 | "154",154,200850,2008-12-13 156 | "155",155,200851,2008-12-20 157 | "156",156,200852,2008-12-27 158 | "157",157,200853,2009-01-03 159 | "158",158,200901,2009-01-10 160 | "159",159,200902,2009-01-17 161 | "160",160,200903,2009-01-24 162 | "161",161,200904,2009-01-31 163 | "162",162,200905,2009-02-07 164 | "163",163,200906,2009-02-14 165 | "164",164,200907,2009-02-21 166 | "165",165,200908,2009-02-28 167 | "166",166,200909,2009-03-07 
168 | "167",167,200910,2009-03-14 169 | "168",168,200911,2009-03-21 170 | "169",169,200912,2009-03-28 171 | "170",170,200913,2009-04-04 172 | "171",171,200914,2009-04-11 173 | "172",172,200915,2009-04-18 174 | "173",173,200916,2009-04-25 175 | "174",174,200917,2009-05-02 176 | "175",175,200918,2009-05-09 177 | "176",176,200919,2009-05-16 178 | "177",177,200920,2009-05-23 179 | "178",178,200921,2009-05-30 180 | "179",179,200922,2009-06-06 181 | "180",180,200923,2009-06-13 182 | "181",181,200924,2009-06-20 183 | "182",182,200925,2009-06-27 184 | "183",183,200926,2009-07-04 185 | "184",184,200927,2009-07-11 186 | "185",185,200928,2009-07-18 187 | "186",186,200929,2009-07-25 188 | "187",187,200930,2009-08-01 189 | "188",188,200931,2009-08-08 190 | "189",189,200932,2009-08-15 191 | "190",190,200933,2009-08-22 192 | "191",191,200934,2009-08-29 193 | "192",192,200935,2009-09-05 194 | "193",193,200936,2009-09-12 195 | "194",194,200937,2009-09-19 196 | "195",195,200938,2009-09-26 197 | "196",196,200939,2009-10-03 198 | "197",197,200940,2009-10-10 199 | "198",198,200941,2009-10-17 200 | "199",199,200942,2009-10-24 201 | "200",200,200943,2009-10-31 202 | "201",201,200944,2009-11-07 203 | "202",202,200945,2009-11-14 204 | "203",203,200946,2009-11-21 205 | "204",204,200947,2009-11-28 206 | "205",205,200948,2009-12-05 207 | "206",206,200949,2009-12-12 208 | "207",207,200950,2009-12-19 209 | "208",208,200951,2009-12-26 210 | "209",209,200952,2010-01-02 211 | "210",210,201001,2010-01-09 212 | "211",211,201002,2010-01-16 213 | "212",212,201003,2010-01-23 214 | "213",213,201004,2010-01-30 215 | "214",214,201005,2010-02-06 216 | "215",215,201006,2010-02-13 217 | "216",216,201007,2010-02-20 218 | "217",217,201008,2010-02-27 219 | "218",218,201009,2010-03-06 220 | "219",219,201010,2010-03-13 221 | "220",220,201011,2010-03-20 222 | "221",221,201012,2010-03-27 223 | "222",222,201013,2010-04-03 224 | "223",223,201014,2010-04-10 225 | "224",224,201015,2010-04-17 226 | 
"225",225,201016,2010-04-24 227 | "226",226,201017,2010-05-01 228 | "227",227,201018,2010-05-08 229 | "228",228,201019,2010-05-15 230 | "229",229,201020,2010-05-22 231 | "230",230,201021,2010-05-29 232 | "231",231,201022,2010-06-05 233 | "232",232,201023,2010-06-12 234 | "233",233,201024,2010-06-19 235 | "234",234,201025,2010-06-26 236 | "235",235,201026,2010-07-03 237 | "236",236,201027,2010-07-10 238 | "237",237,201028,2010-07-17 239 | "238",238,201029,2010-07-24 240 | "239",239,201030,2010-07-31 241 | "240",240,201031,2010-08-07 242 | "241",241,201032,2010-08-14 243 | "242",242,201033,2010-08-21 244 | "243",243,201034,2010-08-28 245 | "244",244,201035,2010-09-04 246 | "245",245,201036,2010-09-11 247 | "246",246,201037,2010-09-18 248 | "247",247,201038,2010-09-25 249 | "248",248,201039,2010-10-02 250 | "249",249,201040,2010-10-09 251 | "250",250,201041,2010-10-16 252 | "251",251,201042,2010-10-23 253 | "252",252,201043,2010-10-30 254 | "253",253,201044,2010-11-06 255 | "254",254,201045,2010-11-13 256 | "255",255,201046,2010-11-20 257 | "256",256,201047,2010-11-27 258 | "257",257,201048,2010-12-04 259 | "258",258,201049,2010-12-11 260 | "259",259,201050,2010-12-18 261 | "260",260,201051,2010-12-25 262 | "261",261,201052,2011-01-01 263 | "262",262,201101,2011-01-08 264 | "263",263,201102,2011-01-15 265 | "264",264,201103,2011-01-22 266 | "265",265,201104,2011-01-29 267 | "266",266,201105,2011-02-05 268 | "267",267,201106,2011-02-12 269 | "268",268,201107,2011-02-19 270 | "269",269,201108,2011-02-26 271 | "270",270,201109,2011-03-05 272 | "271",271,201110,2011-03-12 273 | "272",272,201111,2011-03-19 274 | "273",273,201112,2011-03-26 275 | "274",274,201113,2011-04-02 276 | "275",275,201114,2011-04-09 277 | "276",276,201115,2011-04-16 278 | "277",277,201116,2011-04-23 279 | "278",278,201117,2011-04-30 280 | "279",279,201118,2011-05-07 281 | "280",280,201119,2011-05-14 282 | "281",281,201120,2011-05-21 283 | "282",282,201121,2011-05-28 284 | "283",283,201122,2011-06-04 
285 | "284",284,201123,2011-06-11 286 | "285",285,201124,2011-06-18 287 | "286",286,201125,2011-06-25 288 | "287",287,201126,2011-07-02 289 | "288",288,201127,2011-07-09 290 | "289",289,201128,2011-07-16 291 | "290",290,201129,2011-07-23 292 | "291",291,201130,2011-07-30 293 | "292",292,201131,2011-08-06 294 | "293",293,201132,2011-08-13 295 | "294",294,201133,2011-08-20 296 | "295",295,201134,2011-08-27 297 | "296",296,201135,2011-09-03 298 | "297",297,201136,2011-09-10 299 | "298",298,201137,2011-09-17 300 | "299",299,201138,2011-09-24 301 | "300",300,201139,2011-10-01 302 | "301",301,201140,2011-10-08 303 | "302",302,201141,2011-10-15 304 | "303",303,201142,2011-10-22 305 | "304",304,201143,2011-10-29 306 | "305",305,201144,2011-11-05 307 | "306",306,201145,2011-11-12 308 | "307",307,201146,2011-11-19 309 | "308",308,201147,2011-11-26 310 | "309",309,201148,2011-12-03 311 | "310",310,201149,2011-12-10 312 | "311",311,201150,2011-12-17 313 | "312",312,201151,2011-12-24 314 | "313",313,201152,2011-12-31 315 | "314",314,201201,2012-01-07 316 | "315",315,201202,2012-01-14 317 | "316",316,201203,2012-01-21 318 | "317",317,201204,2012-01-28 319 | "318",318,201205,2012-02-04 320 | "319",319,201206,2012-02-11 321 | "320",320,201207,2012-02-18 322 | "321",321,201208,2012-02-25 323 | "322",322,201209,2012-03-03 324 | "323",323,201210,2012-03-10 325 | "324",324,201211,2012-03-17 326 | "325",325,201212,2012-03-24 327 | "326",326,201213,2012-03-31 328 | "327",327,201214,2012-04-07 329 | "328",328,201215,2012-04-14 330 | "329",329,201216,2012-04-21 331 | "330",330,201217,2012-04-28 332 | "331",331,201218,2012-05-05 333 | "332",332,201219,2012-05-12 334 | "333",333,201220,2012-05-19 335 | "334",334,201221,2012-05-26 336 | "335",335,201222,2012-06-02 337 | "336",336,201223,2012-06-09 338 | "337",337,201224,2012-06-16 339 | "338",338,201225,2012-06-23 340 | "339",339,201226,2012-06-30 341 | "340",340,201227,2012-07-07 342 | "341",341,201228,2012-07-14 343 | 
"342",342,201229,2012-07-21 344 | "343",343,201230,2012-07-28 345 | "344",344,201231,2012-08-04 346 | "345",345,201232,2012-08-11 347 | "346",346,201233,2012-08-18 348 | "347",347,201234,2012-08-25 349 | "348",348,201235,2012-09-01 350 | "349",349,201236,2012-09-08 351 | "350",350,201237,2012-09-15 352 | "351",351,201238,2012-09-22 353 | "352",352,201239,2012-09-29 354 | "353",353,201240,2012-10-06 355 | "354",354,201241,2012-10-13 356 | "355",355,201242,2012-10-20 357 | "356",356,201243,2012-10-27 358 | "357",357,201244,2012-11-03 359 | "358",358,201245,2012-11-10 360 | "359",359,201246,2012-11-17 361 | "360",360,201247,2012-11-24 362 | "361",361,201248,2012-12-01 363 | "362",362,201249,2012-12-08 364 | "363",363,201250,2012-12-15 365 | "364",364,201251,2012-12-22 366 | "365",365,201252,2012-12-29 367 | "366",366,201301,2013-01-05 368 | "367",367,201302,2013-01-12 369 | "368",368,201303,2013-01-19 370 | "369",369,201304,2013-01-26 371 | "370",370,201305,2013-02-02 372 | "371",371,201306,2013-02-09 373 | "372",372,201307,2013-02-16 374 | "373",373,201308,2013-02-23 375 | "374",374,201309,2013-03-02 376 | "375",375,201310,2013-03-09 377 | "376",376,201311,2013-03-16 378 | "377",377,201312,2013-03-23 379 | "378",378,201313,2013-03-30 380 | "379",379,201314,2013-04-06 381 | "380",380,201315,2013-04-13 382 | "381",381,201316,2013-04-20 383 | "382",382,201317,2013-04-27 384 | "383",383,201318,2013-05-04 385 | "384",384,201319,2013-05-11 386 | "385",385,201320,2013-05-18 387 | "386",386,201321,2013-05-25 388 | "387",387,201322,2013-06-01 389 | "388",388,201323,2013-06-08 390 | "389",389,201324,2013-06-15 391 | "390",390,201325,2013-06-22 392 | "391",391,201326,2013-06-29 393 | "392",392,201327,2013-07-06 394 | "393",393,201328,2013-07-13 395 | "394",394,201329,2013-07-20 396 | "395",395,201330,2013-07-27 397 | "396",396,201331,2013-08-03 398 | "397",397,201332,2013-08-10 399 | "398",398,201333,2013-08-17 400 | "399",399,201334,2013-08-24 401 | "400",400,201335,2013-08-31 
402 | "401",401,201336,2013-09-07 403 | "402",402,201337,2013-09-14 404 | "403",403,201338,2013-09-21 405 | "404",404,201339,2013-09-28 406 | "405",405,201340,2013-10-05 407 | "406",406,201341,2013-10-12 408 | "407",407,201342,2013-10-19 409 | "408",408,201343,2013-10-26 410 | "409",409,201344,2013-11-02 411 | "410",410,201345,2013-11-09 412 | "411",411,201346,2013-11-16 413 | "412",412,201347,2013-11-23 414 | "413",413,201348,2013-11-30 415 | "414",414,201349,2013-12-07 416 | "415",415,201350,2013-12-14 417 | "416",416,201351,2013-12-21 418 | "417",417,201352,2013-12-28 419 | "418",418,201401,2014-01-04 420 | "419",419,201402,2014-01-11 421 | "420",420,201403,2014-01-18 422 | "421",421,201404,2014-01-25 423 | "422",422,201405,2014-02-01 424 | "423",423,201406,2014-02-08 425 | "424",424,201407,2014-02-15 426 | "425",425,201408,2014-02-22 427 | "426",426,201409,2014-03-01 428 | "427",427,201410,2014-03-08 429 | "428",428,201411,2014-03-15 430 | "429",429,201412,2014-03-22 431 | "430",430,201413,2014-03-29 432 | "431",431,201414,2014-04-05 433 | "432",432,201415,2014-04-12 434 | "433",433,201416,2014-04-19 435 | "434",434,201417,2014-04-26 436 | "435",435,201418,2014-05-03 437 | "436",436,201419,2014-05-10 438 | "437",437,201420,2014-05-17 439 | "438",438,201421,2014-05-24 440 | "439",439,201422,2014-05-31 441 | "440",440,201423,2014-06-07 442 | "441",441,201424,2014-06-14 443 | "442",442,201425,2014-06-21 444 | "443",443,201426,2014-06-28 445 | "444",444,201427,2014-07-05 446 | "445",445,201428,2014-07-12 447 | "446",446,201429,2014-07-19 448 | "447",447,201430,2014-07-26 449 | "448",448,201431,2014-08-02 450 | "449",449,201432,2014-08-09 451 | "450",450,201433,2014-08-16 452 | "451",451,201434,2014-08-23 453 | "452",452,201435,2014-08-30 454 | "453",453,201436,2014-09-06 455 | "454",454,201437,2014-09-13 456 | "455",455,201438,2014-09-20 457 | "456",456,201439,2014-09-27 458 | "457",457,201440,2014-10-04 459 | "458",458,201441,2014-10-11 460 | 
"459",459,201442,2014-10-18 461 | "460",460,201443,2014-10-25 462 | "461",461,201444,2014-11-01 463 | "462",462,201445,2014-11-08 464 | "463",463,201446,2014-11-15 465 | "464",464,201447,2014-11-22 466 | "465",465,201448,2014-11-29 467 | "466",466,201449,2014-12-06 468 | "467",467,201450,2014-12-13 469 | "468",468,201451,2014-12-20 470 | "469",469,201452,2014-12-27 471 | "470",470,201453,2015-01-03 472 | "471",471,201501,2015-01-10 473 | "472",472,201502,2015-01-17 474 | "473",473,201503,2015-01-24 475 | "474",474,201504,2015-01-31 476 | "475",475,201505,2015-02-07 477 | "476",476,201506,2015-02-14 478 | "477",477,201507,2015-02-21 479 | "478",478,201508,2015-02-28 480 | "479",479,201509,2015-03-07 481 | "480",480,201510,2015-03-14 482 | "481",481,201511,2015-03-21 483 | "482",482,201512,2015-03-28 484 | "483",483,201513,2015-04-04 485 | "484",484,201514,2015-04-11 486 | "485",485,201515,2015-04-18 487 | "486",486,201516,2015-04-25 488 | "487",487,201517,2015-05-02 489 | "488",488,201518,2015-05-09 490 | "489",489,201519,2015-05-16 491 | "490",490,201520,2015-05-23 492 | "491",491,201521,2015-05-30 493 | "492",492,201522,2015-06-06 494 | "493",493,201523,2015-06-13 495 | "494",494,201524,2015-06-20 496 | "495",495,201525,2015-06-27 497 | "496",496,201526,2015-07-04 498 | "497",497,201527,2015-07-11 499 | "498",498,201528,2015-07-18 500 | "499",499,201529,2015-07-25 501 | "500",500,201530,2015-08-01 502 | "501",501,201531,2015-08-08 503 | "502",502,201532,2015-08-15 504 | "503",503,201533,2015-08-22 505 | "504",504,201534,2015-08-29 506 | "505",505,201535,2015-09-05 507 | "506",506,201536,2015-09-12 508 | "507",507,201537,2015-09-19 509 | "508",508,201538,2015-09-26 510 | "509",509,201539,2015-10-03 511 | "510",510,201540,2015-10-10 512 | "511",511,201541,2015-10-17 513 | "512",512,201542,2015-10-24 514 | "513",513,201543,2015-10-31 515 | "514",514,201544,2015-11-07 516 | "515",515,201545,2015-11-14 517 | "516",516,201546,2015-11-21 518 | "517",517,201547,2015-11-28 
519 | "518",518,201548,2015-12-05 520 | "519",519,201549,2015-12-12 521 | "520",520,201550,2015-12-19 522 | "521",521,201551,2015-12-26 523 | "522",522,201552,2016-01-02 524 | "523",523,201601,2016-01-09 525 | "524",524,201602,2016-01-16 526 | "525",525,201603,2016-01-23 527 | "526",526,201604,2016-01-30 528 | "527",527,201605,2016-02-06 529 | "528",528,201606,2016-02-13 530 | "529",529,201607,2016-02-20 531 | "530",530,201608,2016-02-27 532 | "531",531,201609,2016-03-05 533 | "532",532,201610,2016-03-12 534 | "533",533,201611,2016-03-19 535 | "534",534,201612,2016-03-26 536 | "535",535,201613,2016-04-02 537 | "536",536,201614,2016-04-09 538 | "537",537,201615,2016-04-16 539 | "538",538,201616,2016-04-23 540 | "539",539,201617,2016-04-30 541 | "540",540,201618,2016-05-07 542 | "541",541,201619,2016-05-14 543 | "542",542,201620,2016-05-21 544 | "543",543,201621,2016-05-28 545 | "544",544,201622,2016-06-04 546 | "545",545,201623,2016-06-11 547 | "546",546,201624,2016-06-18 548 | "547",547,201625,2016-06-25 549 | "548",548,201626,2016-07-02 550 | "549",549,201627,2016-07-09 551 | "550",550,201628,2016-07-16 552 | "551",551,201629,2016-07-23 553 | "552",552,201630,2016-07-30 554 | "553",553,201631,2016-08-06 555 | "554",554,201632,2016-08-13 556 | "555",555,201633,2016-08-20 557 | "556",556,201634,2016-08-27 558 | "557",557,201635,2016-09-03 559 | "558",558,201636,2016-09-10 560 | "559",559,201637,2016-09-17 561 | "560",560,201638,2016-09-24 562 | "561",561,201639,2016-10-01 563 | "562",562,201640,2016-10-08 564 | "563",563,201641,2016-10-15 565 | "564",564,201642,2016-10-22 566 | "565",565,201643,2016-10-29 567 | "566",566,201644,2016-11-05 568 | "567",567,201645,2016-11-12 569 | "568",568,201646,2016-11-19 570 | "569",569,201647,2016-11-26 571 | "570",570,201648,2016-12-03 572 | "571",571,201649,2016-12-10 573 | "572",572,201650,2016-12-17 574 | "573",573,201651,2016-12-24 575 | "574",574,201652,2016-12-31 576 | "575",575,201701,2017-01-07 577 | 
"576",576,201702,2017-01-14 578 | "577",577,201703,2017-01-21 579 | "578",578,201704,2017-01-28 580 | "579",579,201705,2017-02-04 581 | "580",580,201706,2017-02-11 582 | "581",581,201707,2017-02-18 583 | "582",582,201708,2017-02-25 584 | "583",583,201709,2017-03-04 585 | "584",584,201710,2017-03-11 586 | "585",585,201711,2017-03-18 587 | "586",586,201712,2017-03-25 588 | "587",587,201713,2017-04-01 589 | "588",588,201714,2017-04-08 590 | "589",589,201715,2017-04-15 591 | "590",590,201716,2017-04-22 592 | "591",591,201717,2017-04-29 593 | "592",592,201718,2017-05-06 594 | "593",593,201719,2017-05-13 595 | "594",594,201720,2017-05-20 596 | "595",595,201721,2017-05-27 597 | "596",596,201722,2017-06-03 598 | "597",597,201723,2017-06-10 599 | "598",598,201724,2017-06-17 600 | "599",599,201725,2017-06-24 601 | "600",600,201726,2017-07-01 602 | "601",601,201727,2017-07-08 603 | "602",602,201728,2017-07-15 604 | "603",603,201729,2017-07-22 605 | "604",604,201730,2017-07-29 606 | "605",605,201731,2017-08-05 607 | "606",606,201732,2017-08-12 608 | "607",607,201733,2017-08-19 609 | "608",608,201734,2017-08-26 610 | "609",609,201735,2017-09-02 611 | "610",610,201736,2017-09-09 612 | "611",611,201737,2017-09-16 613 | "612",612,201738,2017-09-23 614 | "613",613,201739,2017-09-30 615 | "614",614,201740,2017-10-07 616 | "615",615,201741,2017-10-14 617 | "616",616,201742,2017-10-21 618 | "617",617,201743,2017-10-28 619 | "618",618,201744,2017-11-04 620 | "619",619,201745,2017-11-11 621 | "620",620,201746,2017-11-18 622 | "621",621,201747,2017-11-25 623 | "622",622,201748,2017-12-02 624 | "623",623,201749,2017-12-09 625 | "624",624,201750,2017-12-16 626 | "625",625,201751,2017-12-23 627 | "626",626,201752,2017-12-30 628 | --------------------------------------------------------------------------------