├── .Rbuildignore ├── .gitignore ├── .travis.yml ├── DESCRIPTION ├── NAMESPACE ├── NEWS.md ├── R ├── aaa.r ├── get-paste-metadata.r ├── get-paste.r ├── get-recent-pastes.r ├── get-trending-pastes.r ├── new-paste.r ├── pastebin-api-key.r └── pastebin-package.R ├── README.Rmd ├── README.md ├── man ├── as.character.paste.Rd ├── get_paste.Rd ├── get_paste_metadata.Rd ├── get_recent_pastes.Rd ├── get_trending_pastes.Rd ├── new_paste.Rd ├── pastebin.Rd ├── pastebin_api_key.Rd └── toString.paste.Rd ├── pastebin.Rproj └── tests ├── test-all.R └── testthat └── test-pastebin.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^\.travis\.yml$ 4 | ^README\.*Rmd$ 5 | ^README\.*html$ 6 | ^NOTES\.*Rmd$ 7 | ^NOTES\.*html$ 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Rproj 5 | src/*.o 6 | src/*.so 7 | src/*.dll 8 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: r 2 | warnings_are_errors: true 3 | sudo: required 4 | 5 | r: 6 | - oldrel 7 | - release 8 | - devel 9 | 10 | apt_packages: 11 | - libv8-dev 12 | - xclip 13 | 14 | env: 15 | global: 16 | - CRAN: http://cran.rstudio.com 17 | 18 | notifications: 19 | email: 20 | - bob@rud.is 21 | irc: 22 | channels: 23 | - "104.236.112.222#builds" 24 | nick: travisci 25 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: pastebin 2 | Type: Package 3 | Title: Tools to Work with the 'pastebin' API 4 | Version: 0.2.1 5 | Date: 2017-07-31 6 | Author: Bob Rudis (bob@@rud.is) 7 | Maintainer: Bob Rudis 8 | 
Description: Tools to work with the 'pastebin' API. 9 | URL: https://github.com/hrbrmstr/pastebin 10 | BugReports: https://github.com/hrbrmstr/pastebin/issues 11 | License: AGPL 12 | Suggests: 13 | testthat 14 | Depends: 15 | R (>= 3.2.0) 16 | Imports: 17 | dplyr, 18 | stats, 19 | purrr, 20 | httr, 21 | jsonlite, 22 | tibble, 23 | xml2 24 | RoxygenNote: 6.0.1 25 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(as.character,paste) 4 | S3method(toString,paste) 5 | export(get_paste) 6 | export(get_paste_metadata) 7 | export(get_recent_pastes) 8 | export(get_trending_pastes) 9 | export(new_paste) 10 | export(pastebin_api_key) 11 | import(httr) 12 | import(purrr) 13 | importFrom(dplyr,mutate) 14 | importFrom(jsonlite,fromJSON) 15 | importFrom(stats,setNames) 16 | importFrom(tibble,as_tibble) 17 | importFrom(xml2,read_html) 18 | importFrom(xml2,xml_children) 19 | importFrom(xml2,xml_find_all) 20 | importFrom(xml2,xml_name) 21 | importFrom(xml2,xml_text) 22 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | 0.2.1 2 | * Add better handling & messaging when using the Scraping API (Fixes #1) 3 | 4 | 0.2.0 5 | * Cleanup/CRAN checks 6 | * Move to https API as required by pastebin 7 | 8 | 0.1.0 9 | * Initial release 10 | -------------------------------------------------------------------------------- /R/aaa.r: -------------------------------------------------------------------------------- 1 | globalVariables( 2 | c(".", "paste_expire_date", "paste_date", "paste_size", 3 | "paste_private", "paste_hits") 4 | ) 5 | -------------------------------------------------------------------------------- /R/get-paste-metadata.r: 
#' Get paste metadata
#'
#' Retrieve the metadata record for a single paste via the pastebin
#' Scraping API, converting date/size/hits/expire fields to native R types.
#'
#' @md
#' @param paste_id paste id
#' @return a `list` of paste metadata with `date`/`expire` as `POSIXct` and
#'         `size`/`hits` as `numeric`, or `NULL` (invisibly) when the calling
#'         IP is not white-listed for the Scraping API.
#' @note This API call uses the Scraping API which requires a paid account
#'       and a white-listed IP address.
#' @references [Scraping API](https://pastebin.com/api_scraping_faq)
#' @export
get_paste_metadata <- function(paste_id) {

  res <- httr::GET("https://pastebin.com/api_scrape_item_meta.php",
                   query = list(i = paste_id))

  httr::stop_for_status(res)

  res <- httr::content(res, as = "text", encoding = "UTF-8")

  # The Scraping API answers with an error page mentioning "THIS IP" when the
  # caller is not white-listed; relay the message and bail out quietly.
  if (grepl("THIS IP", res[1])) {
    message(res)
    return(invisible(NULL))
  }

  out <- jsonlite::fromJSON(res)

  out$date <- as.POSIXct(as.numeric(out$date), origin = "1970-01-01")
  out$size <- as.numeric(out$size)
  out$hits <- as.numeric(out$hits)
  # An expire value of 0 means "never expires"; map it to NA before
  # converting to POSIXct.
  out$expire <- as.numeric(out$expire)
  out$expire <- as.POSIXct(ifelse(out$expire == 0, NA, out$expire),
                           origin = "1970-01-01")

  out

}

#' Get raw paste data
#'
#' @md
#' @param x paste id
#' @param use_scraping_api if a pro member, set this to `TRUE`, otherwise
#'        leave it `FALSE` and be kind to their servers lest ye be banned.
#' @param include_metadata if `use_scraping_api` is `TRUE` and this is `TRUE`,
#'        the returned `list` will include metadata
#' @return a `list` with the paste text or the paste text plus metadata. A
#'         `list` is returned to make it easier to deal with the results
#'         programmatically. Returning a `list` in one call context and a
#'         `character` vector in another may be OK interactively but it
#'         creates a situation where you need to write `if` logic to handle
#'         programmatically. Use [toString] to extract just the paste body.
#'         When the Scraping API rejects the calling IP, `NULL` is returned
#'         invisibly (consistent with [get_paste_metadata] and
#'         [get_recent_pastes]).
#' @note This API call can use the Scraping API which requires a paid account
#'       and a white-listed IP address.
#' @references [Scraping API](https://pastebin.com/api_scraping_faq)
#' @export
get_paste <- function(x, use_scraping_api = FALSE, include_metadata = FALSE) {

  meta <- NULL

  if (!use_scraping_api) {

    res <- httr::GET(sprintf("https://pastebin.com/raw/%s", x))
    httr::stop_for_status(res)
    paste_text <- httr::content(res, as = "text", encoding = "UTF-8")

  } else {

    res <- httr::GET("https://pastebin.com/api_scrape_item.php",
                     query = list(i = x))
    httr::stop_for_status(res)
    paste_text <- httr::content(res, as = "text", encoding = "UTF-8")

    # Consistency fix: the other Scraping API wrappers already detect the
    # "THIS IP" white-list rejection page; without this check the error page
    # itself would be returned as the paste body.
    if (grepl("THIS IP", paste_text[1])) {
      message(paste_text)
      return(invisible(NULL))
    }

    if (include_metadata) meta <- get_paste_metadata(x)

  }

  ret <- list(text = paste_text)
  if (!is.null(meta)) ret$meta <- meta

  class(ret) <- c("paste", "list")

  ret

}

#' Extract just the paste text from a paste object
#'
#' @param x paste object
#' @param ... unused
#' @export
toString.paste <- function(x, ...) { x$text }

#' Extract just the paste text from a paste object
#'
#' @param x paste object
#' @param ... unused
#' @export
as.character.paste <- function(x, ...) { x$text }

#' Get recent pastes
#'
#' @md
#' @param limit number of recent pastes to fetch. Limit is 500, default is 50.
#'        Out-of-range values are clamped into `[1, 500]`.
#' @param lang limit the recent paste list to a particular language.
#'        Default is all pastes.
#' @return a `tibble` of recent pastes with `date`/`expire` as `POSIXct`,
#'         or `NULL` (invisibly) when the calling IP is not white-listed.
#' @note This API call uses the Scraping API which requires a paid account
#'       and a white-listed IP address.
#' @references [Scraping API](https://pastebin.com/api_scraping_faq)
#' @export
get_recent_pastes <- function(limit = 50, lang = NULL) {

  # Clamp the requested count to the API-documented range.
  if (limit < 1) limit <- 50
  if (limit > 500) limit <- 500

  params <- list(limit = limit)
  if (!is.null(lang)) params$lang <- lang

  res <- httr::GET("https://pastebin.com/api_scraping.php",
                   query = params)
  httr::stop_for_status(res)

  res <- httr::content(res, as = "text", encoding = "UTF-8")

  # White-list rejection page from the Scraping API; relay & bail.
  if (grepl("THIS IP", res[1])) {
    message(res)
    return(invisible(NULL))
  }

  out <- as_tibble(jsonlite::fromJSON(res))

  out$date <- as.POSIXct(as.numeric(out$date), origin = "1970-01-01")
  out$size <- as.numeric(out$size)
  # 0 == "never expires" -> NA
  out$expire <- as.numeric(out$expire)
  out$expire <- as.POSIXct(ifelse(out$expire == 0, NA, out$expire),
                           origin = "1970-01-01")

  out

}

#' Get trending pastes
#'
#' @md
#' @param pastebin_key pastebin API key
#' @return a `tibble` of trending pastes; the `paste_` prefix is stripped
#'         from column names and date/size/hits/private columns are
#'         converted to native R types.
#' @references [https://pastebin.com/api#10](https://pastebin.com/api#10)
#' @export
get_trending_pastes <- function(pastebin_key = pastebin_api_key()) {

  res <- httr::POST("https://pastebin.com/api/api_post.php",
                    body = list(api_dev_key = pastebin_key,
                                api_option = "trends"),
                    encode = "form")

  httr::stop_for_status(res)
  httr::content(res, as = "text", encoding = "UTF-8") -> out

  # The trends endpoint returns XML-ish <paste> records; pull each child
  # element into a named one-row data frame and bind them together.
  read_html(out) %>%
    xml_find_all(".//paste") %>%
    map(xml_children) %>%
    map(~map(.x, ~setNames(list(xml_text(.)), xml_name(.)))) %>%
    map_df(flatten_df) %>%
    mutate(paste_date = as.POSIXct(as.numeric(paste_date), origin = "1970-01-01"),
           paste_size = as.numeric(paste_size),
           paste_hits = as.numeric(paste_hits),
           # 0 == "never expires" -> NA
           paste_expire_date = as.numeric(paste_expire_date),
           paste_expire_date = as.POSIXct(ifelse(paste_expire_date == 0, NA,
                                                 paste_expire_date),
                                          origin = "1970-01-01"),
           paste_private = as.logical(as.numeric(paste_private))) %>%
    setNames(gsub("^paste_", "", colnames(.)))

}

#' Create a new paste
#'
#' @md
#' @param text of paste
#' @param name name/title of paste
#' @param format hint for syntax highlighting. Defaults to `text`. See
#'        [the detail page](https://pastebin.com/api#5) for more info.
#' @param impersonate if `TRUE` then `PASTEBIN_USER` and `PASTEBIN_PASSWORD`
#'        _must_ be set in order to generate a user key to be applied with
#'        the API key. Don't blame me, blame
#'        [pastebin](https://pastebin.com/api#8).
#' @param visibility one of `public`, `unlisted` or `private`. Defaults to
#'        `public`.
#' @param expires either `n` for never or an abbreviated time expiration
#'        string in the form of a digit (the "number of") and a units
#'        character `m` for minute(s), `d` for day(s), `w` for week(s).
#'        Defaults to `n` (never). See
#'        [the detail page](https://pastebin.com/api#6) for more info.
#' @param pastebin_key pastebin API key
#' @return the API response body as a single `character` string (the URL of
#'         the new paste on success, or a "Bad API request" message).
#' @note The maximum size a paste can be is 512 kilobytes (0.5 megabytes).
#'       Pro members are allowed to create pastes up to 10 megabytes.
#' @export
new_paste <- function(text, name = NULL, format = "text", impersonate = FALSE,
                      visibility = c("public", "unlisted", "private"),
                      expires = "n", pastebin_key = pastebin_api_key()) {

  expires <- gsub(" ", "", toupper(expires))

  visibility <- match.arg(visibility, c("public", "unlisted", "private"))
  # BUG FIX: the pastebin API expects 0 = public, 1 = unlisted, 2 = private.
  # which() is 1-based, so the previous code sent 1/2/3, silently turning
  # "public" pastes into unlisted ones and sending an invalid value for
  # "private". Shift the index down by one.
  visibility <- which(visibility == c("public", "unlisted", "private")) - 1L

  params <- list(api_dev_key = pastebin_key,
                 api_option = "paste",
                 api_paste_code = text,
                 api_paste_name = name,
                 api_paste_format = format,
                 api_user_key = "",
                 api_paste_expire_date = expires,
                 api_paste_private = visibility)

  if (impersonate) {

    # Exchange user credentials for a user key; pastes are then attributed
    # to that account rather than posted as a guest.
    httr::POST("https://pastebin.com/api/api_login.php",
               body = list(api_dev_key = pastebin_key,
                           api_user_name = Sys.getenv("PASTEBIN_USER"),
                           api_user_password = Sys.getenv("PASTEBIN_PASSWORD")),
               encode = "form") -> u_res

    httr::stop_for_status(u_res)

    params$api_user_key <- httr::content(u_res, as = "text", encoding = "UTF-8")

  }

  httr::POST("https://pastebin.com/api/api_post.php", body = params,
             encode = "form") -> res

  httr::stop_for_status(res)

  httr::content(res, as = "text", encoding = "UTF-8")

}

#' Get or set PASTEBIN_API_KEY value
#'
#' The API wrapper functions in this package all rely on a pastebin API
#' key residing in the environment variable `PASTEBIN_API_KEY`. The
#' easiest way to accomplish this is to set it in the `.Renviron` file in
#' your home directory.
#'
#' @param force force setting a new pastebin API key for the current
#'        environment?
#' @return atomic character vector containing the pastebin API key
#' @note a pastebin API key is only necessary for "poster" access
#' @export
pastebin_api_key <- function(force = FALSE) {

  env <- Sys.getenv('PASTEBIN_API_KEY')
  if (!identical(env, "") && !force) return(env)

  # Non-interactive sessions can't prompt; fail with guidance instead.
  if (!interactive()) {
    stop("Please set env var PASTEBIN_API_KEY to your pastebin API key",
         call. = FALSE)
  }

  message("Couldn't find env var PASTEBIN_API_KEY. See ?pastebin_api_key for more details.")
  message("Please enter your API key and press enter:")
  pat <- readline(": ")

  if (identical(pat, "")) {
    stop("pastebin API key entry failed", call. = FALSE)
  }

  message("Updating PASTEBIN_API_KEY env var")
  Sys.setenv(PASTEBIN_API_KEY = pat)

  pat

}

#' Tools to work with the pastebin API
#'
#' @name pastebin
#' @docType package
#' @author Bob Rudis (bob@@rud.is)
#' @import purrr httr
#' @importFrom dplyr mutate
#' @importFrom xml2 read_html xml_find_all xml_name xml_text xml_children
#' @importFrom jsonlite fromJSON
#' @importFrom tibble as_tibble
#' @importFrom stats setNames
NULL
The idea behind the site is to make it more convenient for people to share large amounts of text online. 8 | 9 | **WIP!! The package API will very likely be in rapid change mode for a bit** 10 | 11 | The following functions are implemented: 12 | 13 | - `get_paste`: Get raw paste data 14 | - `get_paste_metadata`: Get paste metadata 15 | - `get_trending_pastes`: Get trending pastes 16 | - `get_recent_pastes`: Get recent pastes 17 | - `new_paste`: Create a new paste 18 | - `pastebin_api_key`: Get or set `PASTEBIN_API_KEY` value 19 | - `toString.paste`: Extract just the paste text from a paste object 20 | - `as.character.paste`: Extract just the paste text from a paste object 21 | 22 | If you want the `impersonate` parameter of `new_paste()` to work you _must_ set `PASTEBIN_USER` and `PASTEBIN_PASSWORD` (preferably in `~/.Renviron`). 23 | 24 | ### TODO 25 | 26 | - Paste as user 27 | - Finish API coverage including "Pro"" paste features 28 | - Testing 29 | 30 | ### Installation 31 | 32 | ```{r eval=FALSE} 33 | devtools::install_github("hrbrmstr/pastebin") 34 | ``` 35 | 36 | ```{r message=FALSE, warning=FALSE, error=FALSE} 37 | options(width=120) 38 | ``` 39 | 40 | ### Usage 41 | 42 | ```{r message=FALSE, warning=FALSE, error=FALSE} 43 | library(pastebin) 44 | library(tidyverse) 45 | 46 | # current verison 47 | packageVersion("pastebin") 48 | 49 | get_trending_pastes() %>% 50 | arrange(desc(hits)) 51 | 52 | r_pastes <- get_recent_pastes(lang="rsplus") 53 | 54 | glimpse(r_pastes) 55 | ``` 56 | 57 | Can't always trust the `lang` setting. 
Some non-R stuff in there: 58 | 59 | ```{r message=FALSE} 60 | walk(r_pastes$key[1:10], ~print(toString(get_paste(.)))) 61 | ``` 62 | 63 | Since the user is obvious: 64 | 65 | ```{r message} 66 | mebbe_r <- filter(r_pastes, user != "AllRls_net") 67 | walk(mebbe_r$key, ~print(toString(get_paste(.)))) 68 | ``` 69 | 70 | ### Test Results 71 | 72 | ```{r message=FALSE, warning=FALSE, error=FALSE} 73 | library(pastebin) 74 | library(testthat) 75 | 76 | date() 77 | 78 | test_dir("tests/") 79 | ``` 80 | 81 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | `pastebin` : Tools to work with the [pastebin](https://pastebin.com/) API 3 | 4 | > Pastebin is a website where you can store any text online for easy sharing. The website is mainly used by programmers to store pieces of sources code or configuration information, but anyone is more than welcome to paste any type of text. The idea behind the site is to make it more convenient for people to share large amounts of text online. 5 | 6 | **WIP!! The package API will very likely be in rapid change mode for a bit** 7 | 8 | The following functions are implemented: 9 | 10 | - `get_paste`: Get raw paste data 11 | - `get_paste_metadata`: Get paste metadata 12 | - `get_trending_pastes`: Get trending pastes 13 | - `get_recent_pastes`: Get recent pastes 14 | - `new_paste`: Create a new paste 15 | - `pastebin_api_key`: Get or set `PASTEBIN_API_KEY` value 16 | - `toString.paste`: Extract just the paste text from a paste object 17 | - `as.character.paste`: Extract just the paste text from a paste object 18 | 19 | If you want the `impersonate` parameter of `new_paste()` to work you *must* set `PASTEBIN_USER` and `PASTEBIN_PASSWORD` (preferably in `~/.Renviron`). 
20 | 21 | ### TODO 22 | 23 | - Paste as user 24 | - Finish API coverage including "Pro"" paste features 25 | - Testing 26 | 27 | ### Installation 28 | 29 | ``` r 30 | devtools::install_github("hrbrmstr/pastebin") 31 | ``` 32 | 33 | ``` r 34 | options(width=120) 35 | ``` 36 | 37 | ### Usage 38 | 39 | ``` r 40 | library(pastebin) 41 | library(tidyverse) 42 | 43 | # current verison 44 | packageVersion("pastebin") 45 | ``` 46 | 47 | ## [1] '0.1.0' 48 | 49 | ``` r 50 | get_trending_pastes() %>% 51 | arrange(desc(hits)) 52 | ``` 53 | 54 | ## # A tibble: 18 x 10 55 | ## key date title size expire_date private 56 | ## 57 | ## 1 9tMkbSb3 2017-07-27 08:38:31 Katie Cassidy TheFappening 177 NA FALSE 58 | ## 2 5K5wiYuX 2017-07-25 17:11:30 I2 Patch Notes 3275 NA FALSE 59 | ## 3 C1Sq9q9r 2017-07-25 03:57:21 489 User pass Israel leaked #OpAlAqsa 35749 NA FALSE 60 | ## 4 LsqPqNk6 2017-07-26 10:56:06 30227 NA FALSE 61 | ## 5 5dSjgP3x 2017-07-25 22:45:40 PL4TZ1N0M3D4P4UNT4K0.nfo 3566 NA FALSE 62 | ## 6 cbb6Ap8h 2017-07-26 18:26:05 Stranger tells all 7871 NA FALSE 63 | ## 7 cD7PYS0u 2017-07-26 16:51:10 XMAC IS DOWN (KV PROBLEM) 13430 NA FALSE 64 | ## 8 RFSpucEu 2017-07-26 13:24:51 lista nueva 15808 NA FALSE 65 | ## 9 KXdK7kMQ 2017-07-25 15:33:49 4181 NA FALSE 66 | ## 10 qA3mYCtc 2017-07-25 05:34:04 Junk Code 729 NA FALSE 67 | ## 11 SWQxX6DB 2017-07-27 20:57:20 CINEMA 1 1769 NA FALSE 68 | ## 12 YVZq9iDA 2017-07-25 11:51:32 Updated UO 07/25 1978 NA FALSE 69 | ## 13 Vzw3gRax 2017-07-24 22:07:32 1046 NA FALSE 70 | ## 14 xePeziuV 2017-07-27 11:39:36 Dual Core @ DEF CON 25 514 NA FALSE 71 | ## 15 53ZMSRuM 2017-07-27 11:47:00 Anonymous Proxy List Thursday 27th of July 2017 11:47:30 AM 7823 NA FALSE 72 | ## 16 KJ2thMMc 2017-07-27 18:06:35 344 NA FALSE 73 | ## 17 AtPExRB8 2017-07-28 01:17:35 reily santo 100912 NA FALSE 74 | ## 18 V5P6EDMq 2017-07-27 22:17:07 34562 NA FALSE 75 | ## # ... 
with 4 more variables: format_short , format_long , url , hits 76 | 77 | ``` r 78 | r_pastes <- get_recent_pastes(lang="rsplus") 79 | 80 | glimpse(r_pastes) 81 | ``` 82 | 83 | ## Observations: 50 84 | ## Variables: 9 85 | ## $ scrape_url "https://pastebin.com/api_scrape_item.php?i=vQiA3Uv3", "https://pastebin.com/api_scrape_item.php... 86 | ## $ full_url "https://pastebin.com/vQiA3Uv3", "https://pastebin.com/eZX4LNgw", "https://pastebin.com/BLWeERvn... 87 | ## $ date 2017-07-28 12:53:03, 2017-07-28 09:40:33, 2017-07-28 09:36:40, 2017-07-28 00:17:50, 2017-07-27 ... 88 | ## $ key "vQiA3Uv3", "eZX4LNgw", "BLWeERvn", "RKmxRFMQ", "587GBe45", "WVdWwGsD", "GWBxWxMA", "6iMFvXS7", ... 89 | ## $ size 1008, 1437, 1425, 2272, 1647, 1986, 196, 5598, 1888, 5124, 941, 4501, 105, 253, 244, 248, 1489, ... 90 | ## $ expire NA, 2017-07-29 09:40:33, 2017-07-29 09:36:40, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, NA, N... 91 | ## $ title "", "", "", "", "", "", "", "", "", "", "Zona altimetrica dei Comuni dell'Emilia Romagna", "buy ... 92 | ## $ syntax "rsplus", "rsplus", "rsplus", "rsplus", "rsplus", "rsplus", "rsplus", "rsplus", "rsplus", "rsplu... 93 | ## $ user "", "", "", "", "", "", "", "", "", "", "cldscchttn", "carder0077", "", "", "", "", "", "", "mga... 94 | 95 | Can't always trust the `lang` setting. Some non-R stuff in there: 96 | 97 | ``` r 98 | walk(r_pastes$key[1:10], ~print(toString(get_paste(.)))) 99 | ``` 100 | 101 | ## [1] "n: Anzahl Leutchen in deiner Studie\r\nx: Anzahl Leute, die deiner Theorie entsprechen (bzw. *nicht* entsprechen, der statistische Test ist ja symmetrisch)\r\np: Wahrscheinlichkeit, dass das Zahlenverhältnis als Zufall interpretiert werden kann. 
Unter 0.05 kann man sagen, dass da wirklich etwas handfestes vorliegt, unter etwa 0.15 kann man sagen, dass es sich lohnt, weiterzuforschen\r\n\r\n n x p\r\n2 4 0 0.125\r\n3 4 4 0.125\r\n4 5 0 0.062\r\n5 5 5 0.062\r\n6 6 0 0.031\r\n7 6 6 0.031\r\n8 7 0 0.016\r\n9 7 1 0.125\r\n10 7 6 0.125\r\n11 7 7 0.016\r\n12 8 0 0.008\r\n13 8 1 0.070\r\n14 8 7 0.070\r\n15 8 8 0.008\r\n16 9 0 0.004\r\n17 9 1 0.039\r\n18 9 8 0.039\r\n19 9 9 0.004\r\n20 10 0 0.002\r\n21 10 1 0.021\r\n22 10 2 0.109\r\n23 10 8 0.109\r\n24 10 9 0.021\r\n25 10 10 0.002\r\n26 11 0 0.001\r\n27 11 1 0.012\r\n28 11 2 0.065\r\n29 11 9 0.065\r\n30 11 10 0.012\r\n31 11 11 0.001\r\n32 12 0 0.000\r\n33 12 1 0.006\r\n34 12 2 0.039\r\n35 12 3 0.146\r\n36 12 9 0.146\r\n37 12 10 0.039\r\n38 12 11 0.006\r\n39 12 12 0.000" 102 | ## [1] "library(dplyr)\r\nlibrary(xgboost)\r\n\r\ntrain <- data.frame(\r\n X1 = runif(10000, -10, 10),\r\n X2 = runif(10000, -10, 10),\r\n X3 = runif(10000, -10, 10),\r\n X4 = runif(10000, -10, 10)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4) %>%\r\n {xgb.DMatrix(as.matrix(.[1:4]), label = .$Y)}\r\n\r\nmaxrounds = 10000\r\nearly_stop = 50\r\nnfold = 5\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 2,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.shallow <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 5,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.deep <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\ntest <- data.frame(\r\n X1 = runif(1000, -10, 10),\r\n X2 = runif(1000, -10, 10),\r\n X3 = runif(1000, -10, 10),\r\n X4 = runif(1000, -10, 10)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4)\r\n\r\npreds.shallow <- predict(model.shallow, xgb.DMatrix(as.matrix(test[1:4])))\r\npreds.deep <- predict(model.deep, 
xgb.DMatrix(as.matrix(test[1:4])))\r\n\r\nresults <- data.frame(preds.shallow, preds.deep, label = test$Y) %>%\r\n summarise(rmse.shallow = mean((preds.shallow - label)^2) %>% sqrt,\r\n rmse.deep = mean((preds.deep - label)^2) %>% sqrt)\r\n\r\n> results\r\n rmse.shallow rmse.deep\r\n1 1047.639 1047.561" 103 | ## [1] "library(dplyr)\r\nlibrary(xgboost)\r\n\r\ntrain <- data.frame(\r\n X1 = runif(10000, -1, 1),\r\n X2 = runif(10000, -1, 1),\r\n X3 = runif(10000, -1, 1),\r\n X4 = runif(10000, -1, 1)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4) %>%\r\n {xgb.DMatrix(as.matrix(.[1:4]), label = .$Y)}\r\n\r\nmaxrounds = 10000\r\nearly_stop = 50\r\nnfold = 5\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 2,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.shallow <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 5,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.deep <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\ntest <- data.frame(\r\n X1 = runif(1000, -1, 1),\r\n X2 = runif(1000, -1, 1),\r\n X3 = runif(1000, -1, 1),\r\n X4 = runif(1000, -1, 1)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4)\r\n\r\npreds.shallow <- predict(model.shallow, xgb.DMatrix(as.matrix(test[1:4])))\r\npreds.deep <- predict(model.deep, xgb.DMatrix(as.matrix(test[1:4])))\r\n\r\nresults <- data.frame(preds.shallow, preds.deep, label = test$Y) %>%\r\n summarise(rmse.shallow = mean((preds.shallow - label)^2) %>% sqrt,\r\n rmse.deep = mean((preds.deep - label)^2) %>% sqrt)\r\n\r\n\r\n> results\r\n rmse.shallow rmse.deep\r\n1 0.111355 0.07852135" 104 | ## [1] "library(mvtnorm)\r\nlibrary(psych)\r\nsimData <- function(n,dv){\r\n x <- rmvnorm(n,rep(0,dv),sigma=diag(1,dv))\r\n colnames(x) <- paste0('X',1:dv)\r\n x\r\n}\r\n\r\ncor.p <- 
function(d){\r\n cors <- corr.test(d,ci=FALSE)\r\n cors.p <- cors$p[lower.tri(cors$p)]\r\n cors.p\r\n}\r\n\r\nranSelect <- function(d){\r\n n <- round(runif(1,min=10,max=nrow(d)),digits=0)\r\n s <- sample(1:nrow(d),size=n,replace = FALSE)\r\n list(n=n,s=s,d=d[s,])\r\n}\r\n\r\nfindSig <- function(d,tryMax = 1000,alpha){\r\n out <- list()\r\n sig <- 0\r\n try <- 0\r\n while(try <= tryMax){\r\n d.rand <- ranSelect(d)\r\n corOut <- cor.p(d.rand$d)\r\n if(any(corOut <= alpha)){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=sum(corOut BF){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=bf)\r\n }\r\n try <- try + 1\r\n }\r\n list(out=out,sig=sig,try=try)\r\n}\r\n\r\nfindGreatestNSig <- function(findSigOut){\r\n n.sigs <- lapply(findSigOut$out,function(x){x$n.sig})\r\n findSigOut$out[which.max(n.sigs)]\r\n}\r\n\r\nfindGreatestN <- function(findSigOut){\r\n n <- lapply(findSigOut$out,function(x){x$n})\r\n findSigOut$out[which.max(n)]\r\n}\r\n\r\nfindGreatestBF <- function(findSigOut){\r\n bfs <- sapply(findSigOut$out,function(x){x$n.sig$bf})\r\n findSigOut$out[which.max(bfs)]\r\n}\r\n\r\n# Get fsigOuts\r\nx <- simData(100,2)\r\nfsigOut05 <- findSig(x,10000,alpha=.05)\r\nfsigOut005 <- findSig(x,10000,alpha=.005)\r\n\r\n# Proportion of tries that found something significant\r\nfsigOut05$sig/fsigOut05$try\r\nfsigOut005$sig/fsigOut005$try\r\n\r\n# Proportion of tries finding significance, relative to alpha value\r\nfsigOut05$sig/fsigOut05$try/.05\r\nfsigOut005$sig/fsigOut005$try/.005\r\n\r\n# Find combo with the greatest number of significant results\r\nfindGreatestNSig(fsigOut05)\r\nfindGreatestNSig(fsigOut005)\r\n\r\n# Find combo with greatest N\r\nfindGreatestN(fsigOut05)\r\nfindGreatestN(fsigOut005)\r\n\r\n# BF; currently only for DV = 2\r\nfsigOutBF3 <- findBF(d=x,tryMax = 10000,BF=3)" 105 | ## [1] "library(mvtnorm)\r\nlibrary(psych)\r\nsimData <- function(n,dv){\r\n x <- rmvnorm(n,rep(0,dv),sigma=diag(1,dv))\r\n 
colnames(x) <- paste0('X',1:dv)\r\n x\r\n}\r\n\r\ncor.p <- function(d){\r\n cors <- corr.test(d,ci=FALSE)\r\n cors.p <- cors$p[lower.tri(cors$p)]\r\n cors.p\r\n}\r\n\r\nranSelect <- function(d){\r\n n <- round(runif(1,min=10,max=nrow(d)),digits=0)\r\n s <- sample(1:nrow(d),size=n,replace = FALSE)\r\n list(n=n,s=s,d=d[s,])\r\n}\r\n\r\nfindSig <- function(d,tryMax = 1000,alpha){\r\n out <- list()\r\n sig <- 0\r\n try <- 0\r\n while(try <= tryMax){\r\n d.rand <- ranSelect(d)\r\n corOut <- cor.p(d.rand$d)\r\n if(any(corOut <= alpha)){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=sum(corOut% \r\n filter(str_detect(OUTCOME, paste(trg, collapse=\"|\"))) %>%\r\n filter(!is.na(BL)) %>%\r\n mutate(BL, CFBP = (as.numeric(CFB)/as.numeric(BL)*100)) #%>%\r\n\r\n\r\n\r\n\r\nplotstheme <- theme(axis.title = element_text(face=\"bold\", colour=\"black\", size=18),\r\n plot.title = element_text(face=\"bold\", colour=\"black\", size=20),\r\n axis.text.x = element_text(colour=\"black\", size=16),\r\n axis.text.y = element_text(colour=\"black\", size=16),\r\n panel.background = element_rect(fill=NA),\r\n panel.grid.minor = element_line(colour = \"grey75\"),\r\n panel.grid.major = element_line(colour = \"grey75\"),\r\n panel.border = element_rect(colour=\"black\", fill=NA, size=1),\r\n legend.text=element_text(colour=\"black\", size=12),\r\n legend.title = element_text( face=\"bold\", colour=\"black\", size=14))\r\n\r\n\r\n#-------------------------Show treatment efficacy\r\n\r\nplot_labels <- list(\r\n 'DBP, CFB' = \"Diastolic blood pressure\",\r\n 'SBP, CFB' = \"Systolic blood pressure\",\r\n 'eGFR, CFB' = \"Decrease in Glomerular filtration rate\",\r\n 'UPCR, CFB' = \"Urinary protein to creatinine ratio\"\r\n)\r\n\r\nplot_labeller <- function(variable, value){\r\n return(plot_labels[value])\r\n}\r\n\r\n\r\ntrt <- ggplot(data = subsample_all) + \r\n xlim(0, 52) + \r\n xlab(\"Treatment duration, Weeks\") + \r\n ylab(\"Change from baseline, Percents\") + 
\r\n geom_line(aes(x = as.numeric(TF),\r\n y = CFBP,\r\n group = UID,\r\n colour = ARM),\r\n size = 1.5) + \r\n geom_hline(yintercept = 0, linetype=\"dashed\", color = \"red\", size = 1) +\r\n scale_color_discrete(name = \"Arm type\") + \r\n plotstheme + \r\n facet_grid(OUTCOME ~ ., scales = \"free\")" 107 | ## [1] "y <- function() {\r\nx <- 4/5\r\ncat(x)\r\n}\r\n\r\n# Another example thats more dynamic\r\n\r\ny <- function(numOne, numTwo) {\r\n\r\nx <- numOne / numTwo\r\n\r\ncat(x)\r\n\r\n}\r\n\r\n# soo.. calling:\r\n> y(10, 2)\r\n5 # output" 108 | ## [1] "#<>\r\ndtriang <- function(x,min=-1,mode=0,max=1,log=FALSE)\r\n#TITLE The Triangular Distribution\r\n#NAME triangular\r\n#KEYWORDS distribution\r\n#DESCRIPTION\r\n#Density, distribution function, quantile function and random generation\r\n#for the triangular distribution with minimum equal to \\samp{min}, mode equal \\samp{mode}\r\n#and maximum equal to \\samp{max}.\r\n#INPUTS\r\n#{x,q}<>\r\n#{p}<>\r\n#{n}< 1, the length is taken to be the number required.>>\r\n#[INPUTS]\r\n#{min}<>\r\n#{mode}<>\r\n#{max}<>\r\n#{log, log.p}<>\r\n#{lower.tail}< x]}.>>\r\n#VALUE\r\n#\\samp{dtriang} gives the density, \\samp{ptriang} gives the distribution function,\r\n#\\samp{qtriang} gives the quantile function, and \\samp{rtriang} generates random deviates.\r\n\r\n#EXAMPLE\r\n#curve(dtriang(x,min=3,mode=5,max=10), from = 2, to = 11)\r\n#CREATED 08-02-20\r\n#--------------------------------------------\r\n{\r\n\tif(length(x) == 0) return(x)\r\n\tquel <- x <= mode\r\n\td <- ifelse(quel,\r\n 2*(x-min)/((mode-min)*(max-min)),\r\n\t 2 *(max-x)/((max-mode)*(max-min)))\r\n\td[x < min | x > max] <- 0\r\n\td[mode < min | max < mode] <- NaN\r\n\tif(log) d <- log(d)\r\n\tif(any(is.na(d))) warning(\"NaN in dtriang\")\r\n return(d)}\r\n\r\n#<>\r\nptriang <- function(q,min=-1,mode=0,max=1,lower.tail = TRUE, log.p = FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(q) == 0) return(q)\r\n\tquel <- q 
<= mode\r\n\tp <- ifelse(quel,\r\n (q-min)^2 / ((mode-min)*(max-min)),\r\n\t 1 - ((max-q)^2/((max-mode)*(max-min))))\r\n\tp[q < min] <- 0\r\n\tp[q > max] <- 1\r\n\tp[mode < min | max < mode] <- NaN\r\n if(!lower.tail) p <- 1-p\r\n if(log.p) p <- log(p)\r\n\tif(any(is.na(p))) warning(\"NaN in ptriang\")\r\n return(p)}\r\n\r\n#<>\r\nqtriang <- function(p,min=-1,mode=0,max=1,lower.tail=TRUE,log.p=FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(p) == 0) return(p)\r\n if(log.p) p <- exp(p)\r\n\tif(!lower.tail) p <- 1-p\r\n\tquel <- p <= (mode-min)/(max-min)\r\n\tq <- ifelse(quel,\r\n min + sqrt(p*(mode-min)*(max-min)),\r\n max - sqrt((1-p)*(max-min)*(max-mode)))\r\n\tq[p < 0 | p > 1] <- NaN\r\n\tq[mode < min | max < mode] <- NaN\r\n\tif(any(is.na(q))) warning(\"NaN in qtriang\")\r\n return(q)}\r\n#<>\r\ndtriang <- function(x,min=-1,mode=0,max=1,log=FALSE)\r\n#TITLE The Triangular Distribution\r\n#NAME triangular\r\n#KEYWORDS distribution\r\n#DESCRIPTION\r\n#Density, distribution function, quantile function and random generation\r\n#for the triangular distribution with minimum equal to \\samp{min}, mode equal \\samp{mode}\r\n#and maximum equal to \\samp{max}.\r\n#INPUTS\r\n#{x,q}<>\r\n#{p}<>\r\n#{n}< 1, the length is taken to be the number required.>>\r\n#[INPUTS]\r\n#{min}<>\r\n#{mode}<>\r\n#{max}<>\r\n#{log, log.p}<>\r\n#{lower.tail}< x]}.>>\r\n#VALUE\r\n#\\samp{dtriang} gives the density, \\samp{ptriang} gives the distribution function,\r\n#\\samp{qtriang} gives the quantile function, and \\samp{rtriang} generates random deviates.\r\n\r\n#EXAMPLE\r\n#curve(dtriang(x,min=3,mode=5,max=10), from = 2, to = 11)\r\n#CREATED 08-02-20\r\n#--------------------------------------------\r\n{\r\n\tif(length(x) == 0) return(x)\r\n\tquel <- x <= mode\r\n\td <- ifelse(quel,\r\n 2*(x-min)/((mode-min)*(max-min)),\r\n\t 2 *(max-x)/((max-mode)*(max-min)))\r\n\td[x < min | x > max] <- 0\r\n\td[mode < min | max < mode] <- NaN\r\n\tif(log) d 
<- log(d)\r\n\tif(any(is.na(d))) warning(\"NaN in dtriang\")\r\n return(d)}\r\n\r\n#<>\r\nptriang <- function(q,min=-1,mode=0,max=1,lower.tail = TRUE, log.p = FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(q) == 0) return(q)\r\n\tquel <- q <= mode\r\n\tp <- ifelse(quel,\r\n (q-min)^2 / ((mode-min)*(max-min)),\r\n\t 1 - ((max-q)^2/((max-mode)*(max-min))))\r\n\tp[q < min] <- 0\r\n\tp[q > max] <- 1\r\n\tp[mode < min | max < mode] <- NaN\r\n if(!lower.tail) p <- 1-p\r\n if(log.p) p <- log(p)\r\n\tif(any(is.na(p))) warning(\"NaN in ptriang\")\r\n return(p)}\r\n\r\n#<>\r\nqtriang <- function(p,min=-1,mode=0,max=1,lower.tail=TRUE,log.p=FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(p) == 0) return(p)\r\n if(log.p) p <- exp(p)\r\n\tif(!lower.tail) p <- 1-p\r\n\tquel <- p <= (mode-min)/(max-min)\r\n\tq <- ifelse(quel,\r\n min + sqrt(p*(mode-min)*(max-min)),\r\n max - sqrt((1-p)*(max-min)*(max-mode)))\r\n\tq[p < 0 | p > 1] <- NaN\r\n\tq[mode < min | max < mode] <- NaN\r\n\tif(any(is.na(q))) warning(\"NaN in qtriang\")\r\n return(q)}\r\n\r\n\r\n#<>\r\nrtriang <- function(n,min=-1,mode=0,max=1)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{ \tif(length(n) == 0) return(n)\r\n\tif(length(n) > 1) n <- length(n)\r\n\treturn(qtriang(runif(n),min=min,mode=mode,max=max,lower.tail=TRUE,log.p=FALSE))}\r\n\r\n\r\n#<>\r\nrtriang <- function(n,min=-1,mode=0,max=1)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{ \tif(length(n) == 0) return(n)\r\n\tif(length(n) > 1) n <- length(n)\r\n\treturn(qtriang(runif(n),min=min,mode=mode,max=max,lower.tail=TRUE,log.p=FALSE))}" 109 | ## [1] ":/home/scratch/ed/mbc> ssh -v git@github.com\r\nOpenSSH_5.2p1, OpenSSL 0.9.8k 25 Mar 2009\r\ndebug1: Reading configuration data /etc/ssh/ssh_config\r\ndebug1: Applying options for *\r\ndebug1: Connecting to github.com [207.97.227.239] port 22.\r\ndebug1: Connection 
established.\r\ndebug1: identity file /home/f85/ejnovak/.ssh/identity type -1\r\ndebug1: identity file /home/f85/ejnovak/.ssh/id_rsa type 1\r\ndebug1: identity file /home/f85/ejnovak/.ssh/id_dsa type 2\r\ndebug1: Remote protocol version 2.0, remote software version OpenSSH_5.1p1 Debian-5github2\r\ndebug1: match: OpenSSH_5.1p1 Debian-5github2 pat OpenSSH*\r\ndebug1: Enabling compatibility mode for protocol 2.0\r\ndebug1: Local version string SSH-2.0-OpenSSH_5.2\r\ndebug1: SSH2_MSG_KEXINIT sent\r\ndebug1: SSH2_MSG_KEXINIT received\r\ndebug1: kex: server->client aes128-ctr hmac-md5 none\r\ndebug1: kex: client->server aes128-ctr hmac-md5 none\r\ndebug1: SSH2_MSG_KEX_DH_GEX_REQUEST(1024<1024<8192) sent\r\ndebug1: expecting SSH2_MSG_KEX_DH_GEX_GROUP\r\ndebug1: SSH2_MSG_KEX_DH_GEX_INIT sent\r\ndebug1: expecting SSH2_MSG_KEX_DH_GEX_REPLY\r\ndebug1: Host 'github.com' is known and matches the RSA host key.\r\ndebug1: Found key in /home/f85/ejnovak/.ssh/known_hosts:8\r\ndebug1: ssh_rsa_verify: signature correct\r\ndebug1: SSH2_MSG_NEWKEYS sent\r\ndebug1: expecting SSH2_MSG_NEWKEYS\r\ndebug1: SSH2_MSG_NEWKEYS received\r\ndebug1: SSH2_MSG_SERVICE_REQUEST sent\r\ndebug1: SSH2_MSG_SERVICE_ACCEPT received\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Next authentication method: publickey\r\ndebug1: Offering public key: /home/f85/ejnovak/.ssh/id_rsa\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Offering public key: /home/f85/ejnovak/.ssh/id_dsa\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Trying private key: /home/f85/ejnovak/.ssh/identity\r\ndebug1: No more authentication methods to try.\r\nPermission denied (publickey)." 110 | ## [1] " Kubik (16:13:09 18/02/2011)\r\nAko s výhercom druhej 2v2 csl spravím stebou malý rozhovor\r\n\r\n Kubik (16:13:24 18/02/2011)\r\nNazačiatok sa nám predstav. 
Z kade si, ako sa voláš a koľko máš rokov.\r\n\r\n 242838341@qip.ru (16:18:58 18/02/2011)\r\nZdravim, rodiče mi dali jméno Jiří, protože si nejspíš mysleli že je to cool jméno. Jsem z jedné malé vesnice na valašsku a bude mi osmnáct. Mám černou patku a jsem značně asociální.\r\n\r\n Kubik (16:19:46 18/02/2011)\r\nČiže chodíš von často ako môj kocúr? (tzn nikdy?)\r\n\r\n 242838341@qip.ru (16:20:54 18/02/2011)\r\nVen chodím celkem často. V zimě štípu dříví, krmím králíky a slepice. V létě suším seno a sbírám maliny.\r\n\r\n Kubik (16:26:18 18/02/2011)\r\nPovedz nám niečo o tvojích koníčkoch, čo robíš vo voľnom čase, ako si krátiš \"dlhé chvílky\" atď.\r\n\r\n 242838341@qip.ru (16:28:30 18/02/2011)\r\nChov dobytka, hacking, programování, webdesign a kouření konopí.\r\n\r\n 242838341@qip.ru (16:29:28 18/02/2011)\r\nNedávno to byl i Soldat, ale to už je minulost. Přecházím na hry odehrávající se ve třetí dimenzi.\r\n\r\n 242838341@qip.ru (16:46:13 18/02/2011)\r\nSi u toho nudného rozhovoru usnul ne? :D\r\n\r\n Kubik (17:05:33 18/02/2011)\r\nnie mama ma volala :-D\r\n\r\n Kubik (17:07:07 18/02/2011)\r\nČiže soldat už nieje tvoja karta. Zamerajme sa teraz na tvoju minulosť. Predsalen táto liga bola v hre soldat - koľko hrávaš soldat, aké boli naznámejšie klany v ktorých si bol. A Prezraď nám tvoju oblúbenú zbra\r\n\r\n Kubik (17:07:10 18/02/2011)\r\nň a mapu.\r\n\r\n 242838341@qip.ru (17:24:38 18/02/2011)\r\nKdyž hodně zapátrám v paměti, bylo to někdy koncem roku 05 ještě na GZ kdy jsem začal. Po pár dnech strávených na publicu jsem do té mánie zasvětil Kraschmana (mír bratře). Nějakých způsobem jsme se dostali do našeho prvního klanu vedeným Anakinem, název byl myslím STW (Anakina asi Star Wars hodně zasáhlo). První rok hraní jsem valil jenom publicy a kempil v podzemí Equinoxu, narozdíl od Kraschmana, který objevil krásu klanwarů, které já jsem neuznával. Časem jsem změnil názor a začal hrát s ním. 
Můj první pořádný klan byli Conzistenz kde jsem potkal mAdu (v té době s nickem Haniiz) a začal jsem to žrát naplno. Nabrali Nucíka, Scaryho, bimbase a Vita. Conzistenz se nějak rozpadlo a šli jsme pod iNsting, kde to byly fakt nejlepší časy mojí Soldat kariéry ^^. Nekonečné konverzace na Teamspeaku, 15 zápasů za den, první místo v klanbázi, porážka tenkrát neporazitelných cYs atd.\r\n\r\n 242838341@qip.ru (17:25:25 18/02/2011)\r\nto jeste neni konec moment :D\r\n\r\n 242838341@qip.ru (17:27:50 18/02/2011)\r\nPo rozpadu multiklanu Insting, jsme šli pod Team-FPS což byl poslední klan kde jsme to brali trochu vážně.\r\n\r\n 242838341@qip.ru (17:28:46 18/02/2011)\r\nZačali jsme být neaktivní, hráčů ubývalo atd.\r\n\r\n 242838341@qip.ru (17:29:43 18/02/2011)\r\nPoslední dva roky hraju prakticky jenom s mAdou, ostatní šli buď do zahraničních klanů nebo se nato vysrali úplně.\r\n\r\n 242838341@qip.ru (17:31:17 18/02/2011)\r\nTaky jsem zkoušel hrát s cizincema, ale není to ono.\r\n\r\n 242838341@qip.ru (17:35:39 18/02/2011)\r\nMoje oblíbené zbraně. Byl to vývoj, kdysi mi šly i vzduchovky jako Ruger nebo Snipy, ale nějak jsem za tu dobu ztratil aim a zlenivěl jsem. Poslední dva roky hraju prakticky jenom automaty a když mám lucky day tak emku.\r\n\r\n 242838341@qip.ru (17:36:13 18/02/2011)\r\nOblíbená mapa trochu souvisí s těma zbraněma. Když neumíš hrát nic jiného něž jsou automaty tak tě baví jen spray mapy :D\r\n\r\n 242838341@qip.ru (17:36:25 18/02/2011)\r\nSnakebite, Ash, Guardian.\r\n\r\n 242838341@qip.ru (17:38:49 18/02/2011)\r\nOk, next question.\r\n\r\n Kubik (17:44:39 18/02/2011)\r\nTvoj rozsah pamete na klany v soldate je ohromný. Zaujíma ma prečo chceš prestať hrať soldat.\r\n\r\n Kubik (17:47:57 18/02/2011)\r\nJa si nepametám ani čo som sa ťa pýtal predchvílov nie to ešte všetky moje klany.\r\n\r\n 242838341@qip.ru (17:48:26 18/02/2011)\r\nCo?\r\n\r\n 242838341@qip.ru (17:48:32 18/02/2011)\r\nNeco jsem preskocil? 
:D\r\n\r\n 242838341@qip.ru (17:49:18 18/02/2011)\r\nAha.\r\n\r\n 242838341@qip.ru (17:50:25 18/02/2011)\r\nNo proc chci prestat hrat. Samozřejmě s tím nekončím nadobro, jakože uninstall a konec, to ne. Spíš jde o to že mě to přestalo bavit, a není to jenom Soldat.\r\n\r\n 242838341@qip.ru (17:51:45 18/02/2011)\r\nSoldat můj život hodně ovlivnil, určitě to nebylo jen o té hře. Celkově ta komunita byla hodně unikátní, dost individuální a underground na rozdíl od ostatních her.\r\n\r\n Kubik (17:54:20 18/02/2011)\r\nDobre ďakujem. Z dôvodu že toto je rozhovor s výhercom ligy a nie výsluch na súde prejdeme k poslednej otázke.\r\nBudete sa snažiť aj nabudúci ročník obhajovať svoje víťazstvo?\r\n\r\n 242838341@qip.ru (17:55:14 18/02/2011)\r\nUrčitě. Pokud budou protihráči. Tyhle ligy jsou celkem motivace a důvod proč hrát, díky za ně.\r\n\r\n Kubik (17:56:16 18/02/2011)\r\nTento rozhovor snaď trval týžden... ale dik. Maj sa\r\n\r\n 242838341@qip.ru (17:56:37 18/02/2011)\r\nDíky a čau." 111 | 112 | Since the user is obvious: 113 | 114 | ``` r 115 | mebbe_r <- filter(r_pastes, user != "AllRls_net") 116 | walk(mebbe_r$key, ~print(toString(get_paste(.)))) 117 | ``` 118 | 119 | ## [1] "n: Anzahl Leutchen in deiner Studie\r\nx: Anzahl Leute, die deiner Theorie entsprechen (bzw. *nicht* entsprechen, der statistische Test ist ja symmetrisch)\r\np: Wahrscheinlichkeit, dass das Zahlenverhältnis als Zufall interpretiert werden kann. 
Unter 0.05 kann man sagen, dass da wirklich etwas handfestes vorliegt, unter etwa 0.15 kann man sagen, dass es sich lohnt, weiterzuforschen\r\n\r\n n x p\r\n2 4 0 0.125\r\n3 4 4 0.125\r\n4 5 0 0.062\r\n5 5 5 0.062\r\n6 6 0 0.031\r\n7 6 6 0.031\r\n8 7 0 0.016\r\n9 7 1 0.125\r\n10 7 6 0.125\r\n11 7 7 0.016\r\n12 8 0 0.008\r\n13 8 1 0.070\r\n14 8 7 0.070\r\n15 8 8 0.008\r\n16 9 0 0.004\r\n17 9 1 0.039\r\n18 9 8 0.039\r\n19 9 9 0.004\r\n20 10 0 0.002\r\n21 10 1 0.021\r\n22 10 2 0.109\r\n23 10 8 0.109\r\n24 10 9 0.021\r\n25 10 10 0.002\r\n26 11 0 0.001\r\n27 11 1 0.012\r\n28 11 2 0.065\r\n29 11 9 0.065\r\n30 11 10 0.012\r\n31 11 11 0.001\r\n32 12 0 0.000\r\n33 12 1 0.006\r\n34 12 2 0.039\r\n35 12 3 0.146\r\n36 12 9 0.146\r\n37 12 10 0.039\r\n38 12 11 0.006\r\n39 12 12 0.000" 120 | ## [1] "library(dplyr)\r\nlibrary(xgboost)\r\n\r\ntrain <- data.frame(\r\n X1 = runif(10000, -10, 10),\r\n X2 = runif(10000, -10, 10),\r\n X3 = runif(10000, -10, 10),\r\n X4 = runif(10000, -10, 10)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4) %>%\r\n {xgb.DMatrix(as.matrix(.[1:4]), label = .$Y)}\r\n\r\nmaxrounds = 10000\r\nearly_stop = 50\r\nnfold = 5\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 2,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.shallow <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 5,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.deep <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\ntest <- data.frame(\r\n X1 = runif(1000, -10, 10),\r\n X2 = runif(1000, -10, 10),\r\n X3 = runif(1000, -10, 10),\r\n X4 = runif(1000, -10, 10)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4)\r\n\r\npreds.shallow <- predict(model.shallow, xgb.DMatrix(as.matrix(test[1:4])))\r\npreds.deep <- predict(model.deep, 
xgb.DMatrix(as.matrix(test[1:4])))\r\n\r\nresults <- data.frame(preds.shallow, preds.deep, label = test$Y) %>%\r\n summarise(rmse.shallow = mean((preds.shallow - label)^2) %>% sqrt,\r\n rmse.deep = mean((preds.deep - label)^2) %>% sqrt)\r\n\r\n> results\r\n rmse.shallow rmse.deep\r\n1 1047.639 1047.561" 121 | ## [1] "library(dplyr)\r\nlibrary(xgboost)\r\n\r\ntrain <- data.frame(\r\n X1 = runif(10000, -1, 1),\r\n X2 = runif(10000, -1, 1),\r\n X3 = runif(10000, -1, 1),\r\n X4 = runif(10000, -1, 1)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4) %>%\r\n {xgb.DMatrix(as.matrix(.[1:4]), label = .$Y)}\r\n\r\nmaxrounds = 10000\r\nearly_stop = 50\r\nnfold = 5\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 2,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.shallow <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\nparams <- list(\r\n objective = \"reg:linear\",\r\n max_depth = 5,\r\n eta = 0.1\r\n)\r\nrounds <- xgb.cv(params, train, nrounds = maxrounds, early_stopping_rounds = early_stop, nfold = nfold)\r\nmodel.deep <- xgb.train(params, train, nrounds = rounds$best_iteration)\r\n\r\ntest <- data.frame(\r\n X1 = runif(1000, -1, 1),\r\n X2 = runif(1000, -1, 1),\r\n X3 = runif(1000, -1, 1),\r\n X4 = runif(1000, -1, 1)\r\n) %>%\r\n mutate(Y = X1*X2*X3*X4)\r\n\r\npreds.shallow <- predict(model.shallow, xgb.DMatrix(as.matrix(test[1:4])))\r\npreds.deep <- predict(model.deep, xgb.DMatrix(as.matrix(test[1:4])))\r\n\r\nresults <- data.frame(preds.shallow, preds.deep, label = test$Y) %>%\r\n summarise(rmse.shallow = mean((preds.shallow - label)^2) %>% sqrt,\r\n rmse.deep = mean((preds.deep - label)^2) %>% sqrt)\r\n\r\n\r\n> results\r\n rmse.shallow rmse.deep\r\n1 0.111355 0.07852135" 122 | ## [1] "library(mvtnorm)\r\nlibrary(psych)\r\nsimData <- function(n,dv){\r\n x <- rmvnorm(n,rep(0,dv),sigma=diag(1,dv))\r\n colnames(x) <- paste0('X',1:dv)\r\n x\r\n}\r\n\r\ncor.p <- 
function(d){\r\n cors <- corr.test(d,ci=FALSE)\r\n cors.p <- cors$p[lower.tri(cors$p)]\r\n cors.p\r\n}\r\n\r\nranSelect <- function(d){\r\n n <- round(runif(1,min=10,max=nrow(d)),digits=0)\r\n s <- sample(1:nrow(d),size=n,replace = FALSE)\r\n list(n=n,s=s,d=d[s,])\r\n}\r\n\r\nfindSig <- function(d,tryMax = 1000,alpha){\r\n out <- list()\r\n sig <- 0\r\n try <- 0\r\n while(try <= tryMax){\r\n d.rand <- ranSelect(d)\r\n corOut <- cor.p(d.rand$d)\r\n if(any(corOut <= alpha)){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=sum(corOut BF){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=bf)\r\n }\r\n try <- try + 1\r\n }\r\n list(out=out,sig=sig,try=try)\r\n}\r\n\r\nfindGreatestNSig <- function(findSigOut){\r\n n.sigs <- lapply(findSigOut$out,function(x){x$n.sig})\r\n findSigOut$out[which.max(n.sigs)]\r\n}\r\n\r\nfindGreatestN <- function(findSigOut){\r\n n <- lapply(findSigOut$out,function(x){x$n})\r\n findSigOut$out[which.max(n)]\r\n}\r\n\r\nfindGreatestBF <- function(findSigOut){\r\n bfs <- sapply(findSigOut$out,function(x){x$n.sig$bf})\r\n findSigOut$out[which.max(bfs)]\r\n}\r\n\r\n# Get fsigOuts\r\nx <- simData(100,2)\r\nfsigOut05 <- findSig(x,10000,alpha=.05)\r\nfsigOut005 <- findSig(x,10000,alpha=.005)\r\n\r\n# Proportion of tries that found something significant\r\nfsigOut05$sig/fsigOut05$try\r\nfsigOut005$sig/fsigOut005$try\r\n\r\n# Proportion of tries finding significance, relative to alpha value\r\nfsigOut05$sig/fsigOut05$try/.05\r\nfsigOut005$sig/fsigOut005$try/.005\r\n\r\n# Find combo with the greatest number of significant results\r\nfindGreatestNSig(fsigOut05)\r\nfindGreatestNSig(fsigOut005)\r\n\r\n# Find combo with greatest N\r\nfindGreatestN(fsigOut05)\r\nfindGreatestN(fsigOut005)\r\n\r\n# BF; currently only for DV = 2\r\nfsigOutBF3 <- findBF(d=x,tryMax = 10000,BF=3)" 123 | ## [1] "library(mvtnorm)\r\nlibrary(psych)\r\nsimData <- function(n,dv){\r\n x <- rmvnorm(n,rep(0,dv),sigma=diag(1,dv))\r\n 
colnames(x) <- paste0('X',1:dv)\r\n x\r\n}\r\n\r\ncor.p <- function(d){\r\n cors <- corr.test(d,ci=FALSE)\r\n cors.p <- cors$p[lower.tri(cors$p)]\r\n cors.p\r\n}\r\n\r\nranSelect <- function(d){\r\n n <- round(runif(1,min=10,max=nrow(d)),digits=0)\r\n s <- sample(1:nrow(d),size=n,replace = FALSE)\r\n list(n=n,s=s,d=d[s,])\r\n}\r\n\r\nfindSig <- function(d,tryMax = 1000,alpha){\r\n out <- list()\r\n sig <- 0\r\n try <- 0\r\n while(try <= tryMax){\r\n d.rand <- ranSelect(d)\r\n corOut <- cor.p(d.rand$d)\r\n if(any(corOut <= alpha)){\r\n sig <- sig + 1\r\n out[[sig]] <- list(s=sort(d.rand$s),n=d.rand$n,n.sig=sum(corOut% \r\n filter(str_detect(OUTCOME, paste(trg, collapse=\"|\"))) %>%\r\n filter(!is.na(BL)) %>%\r\n mutate(BL, CFBP = (as.numeric(CFB)/as.numeric(BL)*100)) #%>%\r\n\r\n\r\n\r\n\r\nplotstheme <- theme(axis.title = element_text(face=\"bold\", colour=\"black\", size=18),\r\n plot.title = element_text(face=\"bold\", colour=\"black\", size=20),\r\n axis.text.x = element_text(colour=\"black\", size=16),\r\n axis.text.y = element_text(colour=\"black\", size=16),\r\n panel.background = element_rect(fill=NA),\r\n panel.grid.minor = element_line(colour = \"grey75\"),\r\n panel.grid.major = element_line(colour = \"grey75\"),\r\n panel.border = element_rect(colour=\"black\", fill=NA, size=1),\r\n legend.text=element_text(colour=\"black\", size=12),\r\n legend.title = element_text( face=\"bold\", colour=\"black\", size=14))\r\n\r\n\r\n#-------------------------Show treatment efficacy\r\n\r\nplot_labels <- list(\r\n 'DBP, CFB' = \"Diastolic blood pressure\",\r\n 'SBP, CFB' = \"Systolic blood pressure\",\r\n 'eGFR, CFB' = \"Decrease in Glomerular filtration rate\",\r\n 'UPCR, CFB' = \"Urinary protein to creatinine ratio\"\r\n)\r\n\r\nplot_labeller <- function(variable, value){\r\n return(plot_labels[value])\r\n}\r\n\r\n\r\ntrt <- ggplot(data = subsample_all) + \r\n xlim(0, 52) + \r\n xlab(\"Treatment duration, Weeks\") + \r\n ylab(\"Change from baseline, Percents\") + 
\r\n geom_line(aes(x = as.numeric(TF),\r\n y = CFBP,\r\n group = UID,\r\n colour = ARM),\r\n size = 1.5) + \r\n geom_hline(yintercept = 0, linetype=\"dashed\", color = \"red\", size = 1) +\r\n scale_color_discrete(name = \"Arm type\") + \r\n plotstheme + \r\n facet_grid(OUTCOME ~ ., scales = \"free\")" 125 | ## [1] "y <- function() {\r\nx <- 4/5\r\ncat(x)\r\n}\r\n\r\n# Another example thats more dynamic\r\n\r\ny <- function(numOne, numTwo) {\r\n\r\nx <- numOne / numTwo\r\n\r\ncat(x)\r\n\r\n}\r\n\r\n# soo.. calling:\r\n> y(10, 2)\r\n5 # output" 126 | ## [1] "#<>\r\ndtriang <- function(x,min=-1,mode=0,max=1,log=FALSE)\r\n#TITLE The Triangular Distribution\r\n#NAME triangular\r\n#KEYWORDS distribution\r\n#DESCRIPTION\r\n#Density, distribution function, quantile function and random generation\r\n#for the triangular distribution with minimum equal to \\samp{min}, mode equal \\samp{mode}\r\n#and maximum equal to \\samp{max}.\r\n#INPUTS\r\n#{x,q}<>\r\n#{p}<>\r\n#{n}< 1, the length is taken to be the number required.>>\r\n#[INPUTS]\r\n#{min}<>\r\n#{mode}<>\r\n#{max}<>\r\n#{log, log.p}<>\r\n#{lower.tail}< x]}.>>\r\n#VALUE\r\n#\\samp{dtriang} gives the density, \\samp{ptriang} gives the distribution function,\r\n#\\samp{qtriang} gives the quantile function, and \\samp{rtriang} generates random deviates.\r\n\r\n#EXAMPLE\r\n#curve(dtriang(x,min=3,mode=5,max=10), from = 2, to = 11)\r\n#CREATED 08-02-20\r\n#--------------------------------------------\r\n{\r\n\tif(length(x) == 0) return(x)\r\n\tquel <- x <= mode\r\n\td <- ifelse(quel,\r\n 2*(x-min)/((mode-min)*(max-min)),\r\n\t 2 *(max-x)/((max-mode)*(max-min)))\r\n\td[x < min | x > max] <- 0\r\n\td[mode < min | max < mode] <- NaN\r\n\tif(log) d <- log(d)\r\n\tif(any(is.na(d))) warning(\"NaN in dtriang\")\r\n return(d)}\r\n\r\n#<>\r\nptriang <- function(q,min=-1,mode=0,max=1,lower.tail = TRUE, log.p = FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(q) == 0) return(q)\r\n\tquel <- q 
<= mode\r\n\tp <- ifelse(quel,\r\n (q-min)^2 / ((mode-min)*(max-min)),\r\n\t 1 - ((max-q)^2/((max-mode)*(max-min))))\r\n\tp[q < min] <- 0\r\n\tp[q > max] <- 1\r\n\tp[mode < min | max < mode] <- NaN\r\n if(!lower.tail) p <- 1-p\r\n if(log.p) p <- log(p)\r\n\tif(any(is.na(p))) warning(\"NaN in ptriang\")\r\n return(p)}\r\n\r\n#<>\r\nqtriang <- function(p,min=-1,mode=0,max=1,lower.tail=TRUE,log.p=FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(p) == 0) return(p)\r\n if(log.p) p <- exp(p)\r\n\tif(!lower.tail) p <- 1-p\r\n\tquel <- p <= (mode-min)/(max-min)\r\n\tq <- ifelse(quel,\r\n min + sqrt(p*(mode-min)*(max-min)),\r\n max - sqrt((1-p)*(max-min)*(max-mode)))\r\n\tq[p < 0 | p > 1] <- NaN\r\n\tq[mode < min | max < mode] <- NaN\r\n\tif(any(is.na(q))) warning(\"NaN in qtriang\")\r\n return(q)}\r\n#<>\r\ndtriang <- function(x,min=-1,mode=0,max=1,log=FALSE)\r\n#TITLE The Triangular Distribution\r\n#NAME triangular\r\n#KEYWORDS distribution\r\n#DESCRIPTION\r\n#Density, distribution function, quantile function and random generation\r\n#for the triangular distribution with minimum equal to \\samp{min}, mode equal \\samp{mode}\r\n#and maximum equal to \\samp{max}.\r\n#INPUTS\r\n#{x,q}<>\r\n#{p}<>\r\n#{n}< 1, the length is taken to be the number required.>>\r\n#[INPUTS]\r\n#{min}<>\r\n#{mode}<>\r\n#{max}<>\r\n#{log, log.p}<>\r\n#{lower.tail}< x]}.>>\r\n#VALUE\r\n#\\samp{dtriang} gives the density, \\samp{ptriang} gives the distribution function,\r\n#\\samp{qtriang} gives the quantile function, and \\samp{rtriang} generates random deviates.\r\n\r\n#EXAMPLE\r\n#curve(dtriang(x,min=3,mode=5,max=10), from = 2, to = 11)\r\n#CREATED 08-02-20\r\n#--------------------------------------------\r\n{\r\n\tif(length(x) == 0) return(x)\r\n\tquel <- x <= mode\r\n\td <- ifelse(quel,\r\n 2*(x-min)/((mode-min)*(max-min)),\r\n\t 2 *(max-x)/((max-mode)*(max-min)))\r\n\td[x < min | x > max] <- 0\r\n\td[mode < min | max < mode] <- NaN\r\n\tif(log) d 
<- log(d)\r\n\tif(any(is.na(d))) warning(\"NaN in dtriang\")\r\n return(d)}\r\n\r\n#<>\r\nptriang <- function(q,min=-1,mode=0,max=1,lower.tail = TRUE, log.p = FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(q) == 0) return(q)\r\n\tquel <- q <= mode\r\n\tp <- ifelse(quel,\r\n (q-min)^2 / ((mode-min)*(max-min)),\r\n\t 1 - ((max-q)^2/((max-mode)*(max-min))))\r\n\tp[q < min] <- 0\r\n\tp[q > max] <- 1\r\n\tp[mode < min | max < mode] <- NaN\r\n if(!lower.tail) p <- 1-p\r\n if(log.p) p <- log(p)\r\n\tif(any(is.na(p))) warning(\"NaN in ptriang\")\r\n return(p)}\r\n\r\n#<>\r\nqtriang <- function(p,min=-1,mode=0,max=1,lower.tail=TRUE,log.p=FALSE)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{\r\n\tif(length(p) == 0) return(p)\r\n if(log.p) p <- exp(p)\r\n\tif(!lower.tail) p <- 1-p\r\n\tquel <- p <= (mode-min)/(max-min)\r\n\tq <- ifelse(quel,\r\n min + sqrt(p*(mode-min)*(max-min)),\r\n max - sqrt((1-p)*(max-min)*(max-mode)))\r\n\tq[p < 0 | p > 1] <- NaN\r\n\tq[mode < min | max < mode] <- NaN\r\n\tif(any(is.na(q))) warning(\"NaN in qtriang\")\r\n return(q)}\r\n\r\n\r\n#<>\r\nrtriang <- function(n,min=-1,mode=0,max=1)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{ \tif(length(n) == 0) return(n)\r\n\tif(length(n) > 1) n <- length(n)\r\n\treturn(qtriang(runif(n),min=min,mode=mode,max=max,lower.tail=TRUE,log.p=FALSE))}\r\n\r\n\r\n#<>\r\nrtriang <- function(n,min=-1,mode=0,max=1)\r\n#ISALIAS dtriang\r\n#--------------------------------------------\r\n{ \tif(length(n) == 0) return(n)\r\n\tif(length(n) > 1) n <- length(n)\r\n\treturn(qtriang(runif(n),min=min,mode=mode,max=max,lower.tail=TRUE,log.p=FALSE))}" 127 | ## [1] ":/home/scratch/ed/mbc> ssh -v git@github.com\r\nOpenSSH_5.2p1, OpenSSL 0.9.8k 25 Mar 2009\r\ndebug1: Reading configuration data /etc/ssh/ssh_config\r\ndebug1: Applying options for *\r\ndebug1: Connecting to github.com [207.97.227.239] port 22.\r\ndebug1: Connection 
established.\r\ndebug1: identity file /home/f85/ejnovak/.ssh/identity type -1\r\ndebug1: identity file /home/f85/ejnovak/.ssh/id_rsa type 1\r\ndebug1: identity file /home/f85/ejnovak/.ssh/id_dsa type 2\r\ndebug1: Remote protocol version 2.0, remote software version OpenSSH_5.1p1 Debian-5github2\r\ndebug1: match: OpenSSH_5.1p1 Debian-5github2 pat OpenSSH*\r\ndebug1: Enabling compatibility mode for protocol 2.0\r\ndebug1: Local version string SSH-2.0-OpenSSH_5.2\r\ndebug1: SSH2_MSG_KEXINIT sent\r\ndebug1: SSH2_MSG_KEXINIT received\r\ndebug1: kex: server->client aes128-ctr hmac-md5 none\r\ndebug1: kex: client->server aes128-ctr hmac-md5 none\r\ndebug1: SSH2_MSG_KEX_DH_GEX_REQUEST(1024<1024<8192) sent\r\ndebug1: expecting SSH2_MSG_KEX_DH_GEX_GROUP\r\ndebug1: SSH2_MSG_KEX_DH_GEX_INIT sent\r\ndebug1: expecting SSH2_MSG_KEX_DH_GEX_REPLY\r\ndebug1: Host 'github.com' is known and matches the RSA host key.\r\ndebug1: Found key in /home/f85/ejnovak/.ssh/known_hosts:8\r\ndebug1: ssh_rsa_verify: signature correct\r\ndebug1: SSH2_MSG_NEWKEYS sent\r\ndebug1: expecting SSH2_MSG_NEWKEYS\r\ndebug1: SSH2_MSG_NEWKEYS received\r\ndebug1: SSH2_MSG_SERVICE_REQUEST sent\r\ndebug1: SSH2_MSG_SERVICE_ACCEPT received\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Next authentication method: publickey\r\ndebug1: Offering public key: /home/f85/ejnovak/.ssh/id_rsa\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Offering public key: /home/f85/ejnovak/.ssh/id_dsa\r\ndebug1: Authentications that can continue: publickey\r\ndebug1: Trying private key: /home/f85/ejnovak/.ssh/identity\r\ndebug1: No more authentication methods to try.\r\nPermission denied (publickey)." 128 | ## [1] " Kubik (16:13:09 18/02/2011)\r\nAko s výhercom druhej 2v2 csl spravím stebou malý rozhovor\r\n\r\n Kubik (16:13:24 18/02/2011)\r\nNazačiatok sa nám predstav. 
Z kade si, ako sa voláš a koľko máš rokov.\r\n\r\n 242838341@qip.ru (16:18:58 18/02/2011)\r\nZdravim, rodiče mi dali jméno Jiří, protože si nejspíš mysleli že je to cool jméno. Jsem z jedné malé vesnice na valašsku a bude mi osmnáct. Mám černou patku a jsem značně asociální.\r\n\r\n Kubik (16:19:46 18/02/2011)\r\nČiže chodíš von často ako môj kocúr? (tzn nikdy?)\r\n\r\n 242838341@qip.ru (16:20:54 18/02/2011)\r\nVen chodím celkem často. V zimě štípu dříví, krmím králíky a slepice. V létě suším seno a sbírám maliny.\r\n\r\n Kubik (16:26:18 18/02/2011)\r\nPovedz nám niečo o tvojích koníčkoch, čo robíš vo voľnom čase, ako si krátiš \"dlhé chvílky\" atď.\r\n\r\n 242838341@qip.ru (16:28:30 18/02/2011)\r\nChov dobytka, hacking, programování, webdesign a kouření konopí.\r\n\r\n 242838341@qip.ru (16:29:28 18/02/2011)\r\nNedávno to byl i Soldat, ale to už je minulost. Přecházím na hry odehrávající se ve třetí dimenzi.\r\n\r\n 242838341@qip.ru (16:46:13 18/02/2011)\r\nSi u toho nudného rozhovoru usnul ne? :D\r\n\r\n Kubik (17:05:33 18/02/2011)\r\nnie mama ma volala :-D\r\n\r\n Kubik (17:07:07 18/02/2011)\r\nČiže soldat už nieje tvoja karta. Zamerajme sa teraz na tvoju minulosť. Predsalen táto liga bola v hre soldat - koľko hrávaš soldat, aké boli naznámejšie klany v ktorých si bol. A Prezraď nám tvoju oblúbenú zbra\r\n\r\n Kubik (17:07:10 18/02/2011)\r\nň a mapu.\r\n\r\n 242838341@qip.ru (17:24:38 18/02/2011)\r\nKdyž hodně zapátrám v paměti, bylo to někdy koncem roku 05 ještě na GZ kdy jsem začal. Po pár dnech strávených na publicu jsem do té mánie zasvětil Kraschmana (mír bratře). Nějakých způsobem jsme se dostali do našeho prvního klanu vedeným Anakinem, název byl myslím STW (Anakina asi Star Wars hodně zasáhlo). První rok hraní jsem valil jenom publicy a kempil v podzemí Equinoxu, narozdíl od Kraschmana, který objevil krásu klanwarů, které já jsem neuznával. Časem jsem změnil názor a začal hrát s ním. 
Můj první pořádný klan byli Conzistenz kde jsem potkal mAdu (v té době s nickem Haniiz) a začal jsem to žrát naplno. Nabrali Nucíka, Scaryho, bimbase a Vita. Conzistenz se nějak rozpadlo a šli jsme pod iNsting, kde to byly fakt nejlepší časy mojí Soldat kariéry ^^. Nekonečné konverzace na Teamspeaku, 15 zápasů za den, první místo v klanbázi, porážka tenkrát neporazitelných cYs atd.\r\n\r\n 242838341@qip.ru (17:25:25 18/02/2011)\r\nto jeste neni konec moment :D\r\n\r\n 242838341@qip.ru (17:27:50 18/02/2011)\r\nPo rozpadu multiklanu Insting, jsme šli pod Team-FPS což byl poslední klan kde jsme to brali trochu vážně.\r\n\r\n 242838341@qip.ru (17:28:46 18/02/2011)\r\nZačali jsme být neaktivní, hráčů ubývalo atd.\r\n\r\n 242838341@qip.ru (17:29:43 18/02/2011)\r\nPoslední dva roky hraju prakticky jenom s mAdou, ostatní šli buď do zahraničních klanů nebo se nato vysrali úplně.\r\n\r\n 242838341@qip.ru (17:31:17 18/02/2011)\r\nTaky jsem zkoušel hrát s cizincema, ale není to ono.\r\n\r\n 242838341@qip.ru (17:35:39 18/02/2011)\r\nMoje oblíbené zbraně. Byl to vývoj, kdysi mi šly i vzduchovky jako Ruger nebo Snipy, ale nějak jsem za tu dobu ztratil aim a zlenivěl jsem. Poslední dva roky hraju prakticky jenom automaty a když mám lucky day tak emku.\r\n\r\n 242838341@qip.ru (17:36:13 18/02/2011)\r\nOblíbená mapa trochu souvisí s těma zbraněma. Když neumíš hrát nic jiného něž jsou automaty tak tě baví jen spray mapy :D\r\n\r\n 242838341@qip.ru (17:36:25 18/02/2011)\r\nSnakebite, Ash, Guardian.\r\n\r\n 242838341@qip.ru (17:38:49 18/02/2011)\r\nOk, next question.\r\n\r\n Kubik (17:44:39 18/02/2011)\r\nTvoj rozsah pamete na klany v soldate je ohromný. Zaujíma ma prečo chceš prestať hrať soldat.\r\n\r\n Kubik (17:47:57 18/02/2011)\r\nJa si nepametám ani čo som sa ťa pýtal predchvílov nie to ešte všetky moje klany.\r\n\r\n 242838341@qip.ru (17:48:26 18/02/2011)\r\nCo?\r\n\r\n 242838341@qip.ru (17:48:32 18/02/2011)\r\nNeco jsem preskocil? 
:D\r\n\r\n 242838341@qip.ru (17:49:18 18/02/2011)\r\nAha.\r\n\r\n 242838341@qip.ru (17:50:25 18/02/2011)\r\nNo proc chci prestat hrat. Samozřejmě s tím nekončím nadobro, jakože uninstall a konec, to ne. Spíš jde o to že mě to přestalo bavit, a není to jenom Soldat.\r\n\r\n 242838341@qip.ru (17:51:45 18/02/2011)\r\nSoldat můj život hodně ovlivnil, určitě to nebylo jen o té hře. Celkově ta komunita byla hodně unikátní, dost individuální a underground na rozdíl od ostatních her.\r\n\r\n Kubik (17:54:20 18/02/2011)\r\nDobre ďakujem. Z dôvodu že toto je rozhovor s výhercom ligy a nie výsluch na súde prejdeme k poslednej otázke.\r\nBudete sa snažiť aj nabudúci ročník obhajovať svoje víťazstvo?\r\n\r\n 242838341@qip.ru (17:55:14 18/02/2011)\r\nUrčitě. Pokud budou protihráči. Tyhle ligy jsou celkem motivace a důvod proč hrát, díky za ně.\r\n\r\n Kubik (17:56:16 18/02/2011)\r\nTento rozhovor snaď trval týžden... ale dik. Maj sa\r\n\r\n 242838341@qip.ru (17:56:37 18/02/2011)\r\nDíky a čau." 129 | ## [1] "# Zona altimetrica dei Comuni dell'Emilia Romagna\r\n# http://statistica.regione.emilia-romagna.it/allegati/codifiche/zona_altim.csv/view\r\n#\r\n#\r\n# Legenda\r\n# 1 montagna interna \r\n# 2 montagna litoranea \r\n# 3 collina interna \r\n# 4 collina litoranea \r\n# 5 pianura\r\n# http://statistica.regione.emilia-romagna.it/allegati/codifiche/za.rtf/view\r\n#\r\n# Attenzione\r\n# non sono presenti i seguenti comuni che sono stati creati dalla fusione di comuni preesistenti\r\n# 034049 Sissa Trecasali (pianura)\r\n# 035046 Ventasso (montagna interna)\r\n# 034050 Polesine Zibello (pianura)\r\n# 099028 Poggio Torriana (collina interna)\r\n\r\ndati <- read.csv2(\"http://statistica.regione.emilia-romagna.it/allegati/codifiche/zona_altim.csv/at_download/file/zona_altim_per_wcm.csv\", as.is=TRUE)\r\n\r\ndati <- rbind(dati,\r\nc(8,34,49,34049,\"Sissa Trecasali\",5),\r\nc(8,35,46,35046,\"Ventasso\",1),\r\nc(8,34,50,34050,\"Polesine Zibello\",5),\r\nc(8,99,28,99028,\"Poggio 
Torriana\",3))" 130 | ## [1] "*********** Welcome to Carder007 ************\r\n\r\nHI ALL NEW CLIENT\r\nIM BIG HACKER GOOD\r\n- I'm is Professional seller,more than 6 years experience,i have sold cvv credit card to many customers all over the world.\r\n- Selling cvv, fullz many country as: Canada,USA,Australia,UK...all And many country in Europe: Fr,Ger,Spain,Ita...\r\n- I hope we will work together for a long time.\r\n- Always sell cvv quality with high balance.\r\n- I have a website but if you want buy cvv good price please contact me.\r\n- carder007 register, carder007 login, carder007 cc, buy cvv, buy cc, buy bank login, buy bank logs, buy dumps, buy dumps with pin, carder007 registration, carder007 all you need, carder007 cvv\r\n\r\n\r\nContact me:\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n \r\n \r\n \r\n___________________ CCV !! 
CCN ______________________\r\n\r\nList cc and my price..\r\n\r\n- Us (Visa,Master) = 10$ per 1\r\n- Us (Amex,Dis) = 12$ per 1\r\n- Us Bin 15$ , US Dob 25$\r\n- Us fullz info = 30$ per 1\r\n--------------------------------\r\n- Uk (Visa,Master) = 15$ per 1\r\n- Uk (Amex,Dis) = 20$ per 1\r\n- Uk Bin 20$ , UK Dob 25$\r\n- Uk fullz info = 35$ per 1\r\n--------------------------------\r\n- Ca (Visa,Master) = 20$ per 1\r\n- Ca (Amex,Dis) = 25$ per 1\r\n- Ca Bin 20$ , CA Dob 25$\r\n- Ca fullz info = 35$ per 1\r\n--------------------------------\r\n- Au (Visa,Master) = 20$ per 1\r\n- Au (Amex,Dis) = 25$ per 1\r\n- Au Bin 23$ , AU Dob 30$\r\n- Au fullz info = 30$ per 1\r\n--------------------------------\r\n- Eu (Visa,Master) = 25$ per 1\r\n- Eu (Amex,Dis) = 28$ per 1\r\n- Eu Bin 30$ , AU Dob 35$\r\n- Eu fullz info = 45$ per 1\r\n--------------------------------\r\n- RDP = 25$\r\n- SMTP = 30$ ( All Country )\r\n- Italy = 25$ per 1 (fullz info = 40$)\r\n- Spain = 25$ per 1 (fullz info = 40$)\r\n- Denmark = 30$ per1 (fullz info = 40$)\r\n- Sweden = 25$ per 1 (fullz info = 40$)\r\n- France = 25$ per 1 (fullz info = 40$)\r\n- Germany = 25$ per 1 (fullz info = 40$)\r\n- Ireland = 25$ per 1 (fullz info = 40$)\r\n- Mexico = 20$ per 1 (fullz info = 35$)\r\n- Asia = 20$ per 1 (fullz info = 35$)\r\n\r\n_________i Only Exchange WU to PM , WU to WMZ_________\r\n\r\n- 100$ WU = 100$ PM\r\n- 200$ WU = 200$ PM\r\n- 100$ WU = 110$ WMZ\r\n- 200$ WU = 210$ WMZ\r\n\r\n________________ Do WU transfer ______________________\r\n\r\n- 700$ for MTCN 8000$\r\n- 550$ for MTCN 6000$\r\n- 400$ for MTCN 4000$\r\n- 200$ for MTCN 1500$\r\n\r\n__________ Bank Logins Prices US UK CA AU EU _____________\r\n\r\n. Bank Us : ( Bank of america,HALIFAX,BOA,CHASE,Wells Fargo...)\r\n. Balance 3000$ = 150$\r\n. Balance 5000$ = 250$\r\n. Balance 8000$ = 400$\r\n. Balance 12000$ = 600$\r\n. Balance 15000$ = 800$\r\n. Balance 20000$ = 1000$\r\n\r\n- Bank UK : ( LLOYDS TSB,BARCLAYS,Standard Chartered,HSBC...)\r\n. 
Balance 5000 GBP = 300$\r\n. Balance 12000 GBP = 600$\r\n. Balance 16000 GBP = 700$\r\n. Balance 20000 GBP = 1000$\r\n. Balance 30000 GBP = 1200$\r\n\r\n__________________ PayPal account _______________________\r\n\r\n= Account Paypal 1500$ = 200$\r\n= Account Paypal 2500$ = 250$\r\n= Account Paypal 4000$ = 350$\r\n= Account Paypal 7000$ = 550$\r\n\r\n_____________ Dumps track 1 track 2 with pin _____________\r\n\r\n- Dumps,Tracks 1&2 Us = 110$ per 1\r\n- Dumps,Tracks 1&2 Uk = 120$ per 1\r\n- Dumps,Tracks 1&2 Ca = 120$ per 1\r\n- Dumps,Tracks 1&2 Au = 120$ per 1\r\n- Dumps,Tracks 1&2 Eu = 150$ per 1\r\n\r\n-Sample Dump + Pin:\r\nTrack1 : B4096663104697113^FORANTO/CHRI STOPHER M^09061012735200521000000 ,\r\nTrack2 : 4096663104697113=0906101273525 21\r\nPin : 1783\r\n___________________________________________________________\r\n\r\n-WARRANTY time is 10 HOURS. Any cvv purchase over 10 hours can not warranty.\r\n-If you buy over 30 cvvs, i will sell for you best price.\r\n-I will discount for you if you are reseller or you order everyday many on the next day.\r\n-I will prove to you that I am the best sellers. 
And make sure you will enjoy doing business with me.\r\n-I accept Bitcoin ,WU (western union) , WMZ (webmoney) or MoneyGram...\r\n\r\nContact me\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n\r\ncarder007 register\r\ncarder007 login\r\ncarder007 cc\r\nbuy cvv\r\nbuy cc\r\nbuy bank logins\r\nbuy bank logs\r\nbuy dumps \r\nbuy dumps with pin" 131 | ## [1] "N<-2000\r\n\r\nstart<-Sys.time()\r\ninvisible(solve(matrix(rnorm(N^2)*(1:{N^2}),N,N)))\r\nprint(Sys.time()-start)" 132 | ## [1] "library(parallel)\r\nN<-2000\r\n\r\nlst<-list()\r\nfor (i in 1:{detectCores()}) {\r\n lst[[i]]=matrix(rnorm(N^2)*(1:{N^2}),N,N)\r\n}\r\ncl<-makeCluster(detectCores())\r\nstart<-Sys.time()\r\ninvisible(clusterMap(cl, solve, lst))\r\nprint(Sys.time()-start)\r\nstopCluster(cl)" 133 | ## [1] "library(parallel)\r\nN<-2000\r\n\r\nlst<-list()\r\nfor (i in 1:{detectCores()}) {\r\n lst[[i]]=matrix(rnorm(N^2)*(1:{N^2}),N,N)\r\n}\r\ncl<-makeCluster(detectCores())\r\nstart<-Sys.time()\r\nclusterApply(cl, solve, lst)\r\nprint(Sys.time()-start)\r\nstopCluster(cl)" 134 | ## [1] "library(parallel)\r\nN<-2000\r\n\r\nlst<-list()\r\nfor (i in 1:{detectCores()/2}) {\r\n lst[[i]]=matrix(rnorm(N^2)*(1:{N^2}),N,N)\r\n}\r\ncl<-makeCluster(detectCores()/2)\r\nstart<-Sys.time()\r\nclusterApply(cl, solve, lst)\r\nprint(Sys.time()-start)\r\nstopCluster(cl)" 135 | ## [1] "# LIBRARIES\r\nlibrary(dplyr)\r\nlibrary(neuralnet)\r\n\r\n# OPTIONS\r\noptions(scipen = 999)\r\nstart.time <- Sys.time()\r\n\r\n# DATA\r\ntrain <- read.csv(\"data/train_v2.csv\")\r\ntest <- read.csv(\"data/test_v2.csv\")\r\n\r\n# DATA MANIPULATION\r\n# replace NA values with column average\r\nrep_train <- function() {\r\n means = NULL\r\n for(i in (1 : 771)) {\r\n means[i] <- mean(train[, i], na.rm = TRUE)\r\n train[, i][is.na(train[, i])] <- 
means[i]\r\n }\r\n return(train)\r\n}\r\n\r\nrep_test <- function() {\r\n means = NULL\r\n for(i in (1 : 770)) {\r\n means[i] <- mean(test[, i], na.rm = TRUE)\r\n test[, i][is.na(test[, i])] <- means[i]\r\n }\r\n return(test)\r\n}\r\n\r\ntrain <- rep_train()\r\ntest <- rep_test()\r\n\r\n# STANDARDIZATION\r\n# some columns have high variances, scaling necessary\r\nfind_variances <- function() {\r\n for(i in (1 : 771)) {\r\n if(var(train[, i]) > 1) {\r\n print(i)\r\n }\r\n }\r\n}\r\n\r\n# MODEL\r\nn <- names(train)\r\nf <- as.formula(paste('loss ~', paste(n[!n %in% \"loss\"], collapse = \" + \")))\r\nnn <- neuralnet(f, data = train, hidden = c(3, 2), linear.output = TRUE)\r\npredicted <- compute(nn, test)\r\n\r\nfor(i in 1 : length(predicted$net.result)) {\r\n if(predicted$net.result[i] < 0.5) {\r\n predicted$net.result[i] <- 0\r\n }\r\n else {\r\n predicted$net.result[i] <- 1\r\n }\r\n}\r\n\r\nids <- test$id\r\nresults <- cbind(ids, predicted$net.result)\r\ncolnames(results) <- c('id', 'loss')\r\n\r\nwrite.csv(results, 'data/results.csv', row.names = FALSE)\r\n\r\n\r\nend.time <- Sys.time()\r\nelapsed <- end.time - start.time" 136 | ## [1] "library(ggplot2)\r\nlibrary(ggmap)\r\nlibrary(maps)\r\nlibrary(mapdata)\r\n\r\n#######################\r\n###### POINTS #######\r\n#######################\r\n#Load and filter\r\ndata<-read.csv(\"~/Feeld/results-20170723-180046.csv\")\r\nmaps <- data %>% \r\n mutate(lng=round(lng,3)) %>% \r\n mutate(lat=round(lat,3)) %>% \r\n group_by(lat,lng,dim_market) %>% \r\n summarize(total_rates=sum(total_rates))\r\nhead(maps)\r\n\r\n#Create plot_map function\r\nplot_map <- function(map) {\r\n bc_bbox <- make_bbox(lat = lat, lon = lng, data = map)\r\n bc_bbox\r\n bc_big <- get_map(location = bc_bbox, source = \"google\", maptype = \"terrain\")\r\n ggmap(bc_big,legend=\"none\") + \r\n geom_point(data=map,alpha=0.2,mapping = aes(x = lng, y = lat), size=3)\r\n}\r\n\r\n#Apply it to each 
factor\r\nby(data=maps,INDICES=maps$dim_market,FUN=plot_map)\r\n\r\n#######################\r\n###### HEATMAP #######\r\n#######################\r\n#Load and filter\r\ndata<-read.csv(\"~/Feeld/results-20170723-180046.csv\")\r\nmaps <- data %>% \r\n mutate(lng=round(lng,3)) %>% \r\n mutate(lat=round(lat,3))\r\nhead(maps)\r\n\r\n#Heat map function\r\nplot_heat_map <- function(map) {\r\n bc_bbox <- make_bbox(lat = lat, lon = lng, data = map)\r\n bc_bbox\r\n bc_big <- get_map(location = bc_bbox, source = \"google\", maptype = \"terrain\")\r\n ggmap(bc_big) + \r\n geom_density2d(data = maps, aes(x = lng, y = lat, fill = ..level.., alpha = ..level..), size = 0.3) + \r\n stat_density2d(data = maps, \r\n aes(x = lng, y = lat, fill = ..level.., alpha = ..level..), size = 0.01, \r\n bins = 15, geom = \"polygon\") + \r\n scale_fill_gradient(low = \"green\", high = \"red\") + \r\n scale_alpha(range = c(0, 0.3), guide = FALSE)\r\n}\r\n\r\n#Plot\r\nby(data=maps,INDICES=maps$dim_market,FUN=plot_heat_map)" 137 | ## [1] "# Plot a standard normal curve in R\r\nx = seq(-3.5,3.5,0.1)\r\n\r\npnorm(x)\r\n\r\nplot(x,dnorm(x),type=\"l\")" 138 | ## [1] "Iteration 1 Lambda: 0.02787363 \r\nIteration 2 Lambda: -0.0003133725\r\nSuck it Trebek\r\nIteration 4 Lambda: 0.02845428 \r\nIteration 5 Lambda: 0.0008845864 \r\nIteration 6 Lambda: 0.02760765 \r\nIteration 7 Lambda: 0.02681197 \r\nIteration 8 Lambda: 0.002981385 \r\nIteration 9 Lambda: 0.02428759 \r\nIteration 10 Lambda: 0.02227737 \r\nIteration 11 Lambda: 0.009059805 \r\nIteration 12 Lambda: 0.01739554 \r\nIteration 13 Lambda: 0.01556047 \r\nIteration 14 Lambda: 0.01423330 \r\nIteration 15 Lambda: 0.01447612 \r\nIteration 16 Lambda: 0.0144591 \r\nIteration 17 Lambda: 0.01445883 \r\nIteration 18 Lambda: 0.01445883" 139 | ## [1] "Iteration 1 Lambda: 0.02787363 \r\nIteration 2 Lambda: -0.0003133725 \r\nIteration 3 Lambda: 0.02816089 \r\nIteration 4 Lambda: 0.02845428 \r\nIteration 5 Lambda: 0.0008845864 \r\nIteration 6 Lambda: 0.02760765 
\r\nIteration 7 Lambda: 0.02681197 \r\nIteration 8 Lambda: 0.002981385 \r\nIteration 9 Lambda: 0.02428759 \r\nIteration 10 Lambda: 0.02227737 \r\nIteration 11 Lambda: 0.009059805 \r\nIteration 12 Lambda: 0.01739554 \r\nIteration 13 Lambda: 0.01556047 \r\nIteration 14 Lambda: 0.01423330 \r\nIteration 15 Lambda: 0.01447612 \r\nIteration 16 Lambda: 0.0144591 \r\nIteration 17 Lambda: 0.01445883 \r\nIteration 18 Lambda: 0.01445883" 140 | ## [1] "f<-function(x) {\r\n x[x < pi] <- 1/sqrt(2)\r\n x[x >= pi & x < 2*pi] <- -1/sqrt(2)\r\n x[x >= 2*pi & x < 3*pi] <- 1/sqrt(2)\r\n x[x >= 3*pi] <- -1/sqrt(2)\r\n return(x)\r\n}\r\n\r\na<-function(x,i) {\r\n return(sin(i*pi*x) + cos(i*pi*x))\r\n}\r\n\r\nx<-seq(0,4*pi,length=200)\r\ny<-f(x)\r\n\r\ncols<-100\r\nX<-matrix(0,nrow=length(x),ncol=cols)\r\nfor(i in (1:cols)) {\r\n X[,i]<-a(x,i)\r\n}\r\nb.hat<-solve(t(X)%*%X,t(X))%*%y\r\n\r\nplot(x,y,type=\"l\")\r\nlines(x,X%*%b.hat,col=\"blue\")\r\nlines(smooth.spline(x,y),col=\"red\")" 141 | ## [1] "f<-function(x) {\r\n x[x < pi] <- 1/sqrt(2)\r\n x[x >= pi & x < 2*pi] <- -1/sqrt(2)\r\n x[x >= 2*pi & x < 3*pi] <- 1/sqrt(2)\r\n x[x >= 3*pi] <- -1/sqrt(2)\r\n return(x)\r\n}\r\n\r\na<-function(x,i) {\r\n return(sin(i*x) + cos(i*x))\r\n}\r\n\r\nx<-seq(0,4*pi,length=100)\r\ny<-f(x)\r\n\r\ncols<-80\r\nX<-matrix(0,nrow=length(x),ncol=cols)\r\nfor(i in (1:cols)) {\r\n X[,i]<-a(x,i)\r\n}\r\nb.hat<-solve(t(X)%*%X,t(X))%*%y\r\n\r\nplot(x,y,type=\"l\")\r\nlines(x,X%*%b.hat,col=\"blue\")" 142 | ## [1] "makeCacheMatrix <- function(x=matrix()){\r\n i <- NULL\r\n setMatrix <- function(b) {\r\n x <<- b\r\n i <<- NULL\r\n }\r\n \r\n getMatrix <- function() x\r\n \r\n setInverse <- function(i) i <<- solve(x)\r\n \r\n getInverse <- function() i\r\n \r\n list(setMatrix = setMatrix, getMatrix = getMatrix,\r\n setInverse = setInverse, getInverse = getInverse)\r\n}\r\n\r\ncacheSolve <- function(x, ...) 
{\r\n i <- x$getInverse()\r\n \r\n if((!is.null(i))) {\r\n message(\"getting cached inverse\")\r\n return(i)\r\n }\r\n \r\n message(\"computing new inverse\")\r\n unknown <- x$getMatrix()\r\n i <- x$setInverse(unknown)\r\n i\r\n \r\n}" 143 | ## [1] "plot(c, x[,3], xlab=\"\", ylab=\"\", col=\"blue\", t=\"l\", xlim=c(0,520), ylim=c(5,28))\r\npar(new=T)\r\nplot(c, y[,3], xlab=\"Coord.\", ylab=\"Temp.\", col=\"red\", t=\"l\", xlim=c(0,520), ylim=c(5,28))" 144 | ## [1] "plot(1, 1)" 145 | ## [1] "library(dlm)\r\nlibrary(doParallel)\r\ncl = makeCluster(detectCores()-1)\r\nregisterDoParallel(cl)\r\n\r\nt100 <- proc.time()\r\nx <- foreach(j = seq(1,1000),.combine = 'rbind',.packages = c(\"dlm\",\"doParallel\")) %dopar% {\r\n\r\n phi = runif(1,.5,.9)\t\r\n sig_epsilon_2 = runif(1,1.1,2)\r\n print(j)\r\n\r\nforeach(i = seq(1,100),.combine='+',.packages = c(\"dlm\",\"doParallel\")) %do% {\r\n count1_3 = 0\r\n count2_3 = 0\r\n\tprint(\"i\")\r\n\tprint(i)\r\n\tprint(\"j\")\r\n\tprint(j)\r\n\tt<- proc.time()\r\n\tnobs <- 250\t\r\n\t#This is the AR(1) model\r\n\ty_t <- arima.sim(n=nobs,list(ar=phi,ma=0),sd=sqrt(sig_epsilon_2))\r\n\t#We constrain the 1st parameter to be less than one and \r\n\t#the second parameter to be positive.\r\n\tparm_rest <- function(parm){\r\n\t \treturn( c(exp(parm[1])/(1+exp(parm[1])),exp(parm[2])) ) \r\n\t}\r\n\t \t\r\n \toriginal_parameters <- c(phi,sig_epsilon_2)\r\n\tssm_ar1<- function(parm) {\r\n\t\tparm<- parm_rest(parm)\r\n\t\treturn(dlm(FF=1,V=0,GG=parm[1],W=parm[2],\r\n\t\tm0=0,C0=solve(1-parm[1]^2)*parm[2]))\r\n\t}\r\n\r\n\tresult_3 <- dlmMLE(y_t,parm=c(0,0),build=ssm_ar1,hessian=T)\r\n\r\n\tcoef_3 <- parm_rest(result_3$par)\r\n\r\n\tdg1_3 <- exp(result_3$par[1])/(1+exp(result_3$par[1]))^2\r\n\tdg2_3<- exp(result_3$par[2])\r\n\tdg_3<- diag(c(dg1_3,dg2_3))\r\n\tmyvar_3 <- dg_3%*%solve(result_3$hessian)%*%dg_3\r\n\tmyvar_3 <- sqrt(diag(myvar_3))\r\n\r\n\tlower_3 = coef_3 - 1.96 * myvar_3\r\n\tupper_3 = coef_3 + 1.96 * myvar_3\r\n\r\n 
\r\n############################################################################################################################################\r\n# count1_3 is the count of the number of times out of 100 that phi falls OUTSIDE it's 95% CI built using dlmMLE\r\n# count1_3 is ~Binomial(100,.05) and should have a mean of size * p = 100 * .05 = 5 and\r\n# a variance of size * p * q = 100 * .05* .95 = 4.75\r\n# Similarly for count2_3.\r\n# \r\n# Define Y_n = X_1 + ... + X_n where X_i takes the value 1 in the event that\r\n# the (generated) original parameter does not lie in it's 95% CI on the ith iteration and 0 otherwise.\r\n# We compute Y_100 (since i goes from 1 to 100) \r\n# We can regard this as one realization of Binomial distribution with size=100 and p=.05 (probability of a parameter\r\n# falling outside it's CI). \r\n# We repeat the above process j= 1000 times and accumulate the results in count1_1_vector.This will be ~Binomial(n=1000,size=100,p=.05)\r\n# \r\n############################################################################################################################################\r\n \r\n print(\"Time taken :\")\r\n print(proc.time()-t)\r\n\r\n \tifelse(lower_3[1] <= original_parameters[1] && original_parameters[1] <= upper_3[1],print(\"phi within CI\"),\r\n {print(original_parameters[1]); print(\"phi outside\") ; count1_3= count1_3 +1} )\r\n\r\n ifelse(lower_3[2] <= original_parameters[2] && original_parameters[2] <= upper_3[2],print(\"sigma_epsilon_2 within CI\"),{ print(\"sigma_epsilon_2 outside\") ; count2_3= count2_3 +1} )\r\n \r\n return(c(count1_3,count2_3))\r\n \r\n \r\n}\r\n}\r\n\r\nprint(proc.time()-t100)\r\n\r\nexpected_distribution<- rbinom(1000,100,.05)\r\n\r\ncount1_3_vector <- x[,1]\r\ncount2_3_vector <- x[,2]\r\n\r\npdf(\"Graph-phi-dlmMLE.pdf\",width = 5.6,height = 3.8)\r\npar(mai=c(.8, .8, .3, .2))\r\nqqplot(expected_distribution,count1_3_vector,main=expression(paste(\"Graph for \",phi)),xlab=\"Expected 
distribution\",ylab=\"Observed values\")\r\nqqline(count1_3_vector,distribution = function(probs) { qbinom(probs, size=100, prob=0.05) },col = \"red\",lwd = 2)\r\ndev.off()\r\n\r\n\r\npdf(\"Graph-phi-dlmMLE-jitter.pdf\",width = 5.6,height = 3.8)\r\npar(mai=c(.8, .8, .3, .2))\r\nqqplot(jitter(expected_distribution),jitter(count1_3_vector),main=expression(paste(\"Graph for \",phi,\" : jittered\")),xlab=\"Expected distribution\",ylab=\"Observed values\")\r\nqqline(count1_3_vector,distribution = function(probs) { qbinom(probs, size=100, prob=0.05) },col = \"red\",lwd = 2)\r\ndev.off()\r\n\r\npdf(\"Graph-sigma-dlmMLE.pdf\",width = 5.6,height = 3.8)\r\npar(mai=c(.8, .8, .3, .2))\r\nqqplot(expected_distribution,count2_3_vector,main=expression(paste(\"Graph for \",sigma^2)),xlab=\"Expected distribution\",ylab = \"Observed values\")\r\nqqline(count2_3_vector,distribution = function(probs) { qbinom(probs, size=100, prob=0.05) },col = \"red\",lwd = 2)\r\ndev.off()\r\n\r\n\r\npdf(\"Graph-sigma-dlmMLE-jitter.pdf\",width = 5.6,height = 3.8)\r\npar(mai=c(.8, .8, .3, .2))\r\nqqplot(jitter(expected_distribution),jitter(count2_3_vector),main=expression(paste(\"Graph for \",sigma^2,\" : jittered\")),xlab=\"Expected distribution\",ylab = \"Observed values\")\r\nqqline(count2_3_vector,distribution = function(probs) { qbinom(probs, size=100, prob=0.05) },col = \"red\",lwd = 2)\r\ndev.off()" 146 | ## [1] "require(foreach)\r\n\r\nt.rolls <- c(13, 17, 4, 16, 16, 18, 4, 11, 16, 13, 11, 14, 13, 7, 13, 8, 15, 17, 9, 15, 13, 17, 16, 4, 10, 9, 13, 12, 3, 14, 17, 15, 19, 16, 12, 11, 16, 9, 15, 13, 6, 7, 18, 7, 16, 12, 13, 14, 16, 16, 11, 20, 10, 20, 13, 12, 13, 15, 12, 12, 13, 14, 15, 19)\r\n\r\nset.seed(29348029)\r\nnullDistSize <- 1000\r\nnullDist <- foreach(i = 1:nullDistSize, .combine = 'c') %do% {\r\n mean(ceiling(runif(length(t.rolls), min = 0, max = 20)))\r\n}\r\n\r\nhist(nullDist, breaks = seq(1, 20, by = .1), xlab = \"Mean\", main = \"Mean of 64 d20 rolls\")\r\nabline(v = mean(t.rolls), 
col = \"red\")\r\ntext(y = 50, x = mean(t.rolls), \"Mean of Travis' rolls\", col = \"red\", pos = 4)" 147 | ## [1] "*********** Welcome to Carder007 ************\r\n\r\nHI ALL NEW CLIENT\r\nIM BIG HACKER GOOD\r\n- I'm is Professional seller,more than 6 years experience,i have sold cvv credit card to many customers all over the world.\r\n- Selling cvv, fullz many country as: Canada,USA,Australia,UK...all And many country in Europe: Fr,Ger,Spain,Ita...\r\n- I hope we will work together for a long time.\r\n- Always sell cvv quality with high balance.\r\n- I have a website but if you want buy cvv good price please contact me.\r\n- carder007 register, carder007 login, carder007 cc, buy cvv, buy cc, buy bank login, buy bank logs, buy dumps, buy dumps with pin, carder007 registration, carder007 all you need, carder007 cvv\r\n\r\n\r\nContact me:\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n \r\n \r\n \r\n___________________ CCV !! 
CCN ______________________\r\n\r\nList cc and my price..\r\n\r\n- Us (Visa,Master) = 10$ per 1\r\n- Us (Amex,Dis) = 12$ per 1\r\n- Us Bin 15$ , US Dob 25$\r\n- Us fullz info = 30$ per 1\r\n--------------------------------\r\n- Uk (Visa,Master) = 15$ per 1\r\n- Uk (Amex,Dis) = 20$ per 1\r\n- Uk Bin 20$ , UK Dob 25$\r\n- Uk fullz info = 35$ per 1\r\n--------------------------------\r\n- Ca (Visa,Master) = 20$ per 1\r\n- Ca (Amex,Dis) = 25$ per 1\r\n- Ca Bin 20$ , CA Dob 25$\r\n- Ca fullz info = 35$ per 1\r\n--------------------------------\r\n- Au (Visa,Master) = 20$ per 1\r\n- Au (Amex,Dis) = 25$ per 1\r\n- Au Bin 23$ , AU Dob 30$\r\n- Au fullz info = 30$ per 1\r\n--------------------------------\r\n- Eu (Visa,Master) = 25$ per 1\r\n- Eu (Amex,Dis) = 28$ per 1\r\n- Eu Bin 30$ , AU Dob 35$\r\n- Eu fullz info = 45$ per 1\r\n--------------------------------\r\n- RDP = 25$\r\n- SMTP = 30$ ( All Country )\r\n- Italy = 25$ per 1 (fullz info = 40$)\r\n- Spain = 25$ per 1 (fullz info = 40$)\r\n- Denmark = 30$ per1 (fullz info = 40$)\r\n- Sweden = 25$ per 1 (fullz info = 40$)\r\n- France = 25$ per 1 (fullz info = 40$)\r\n- Germany = 25$ per 1 (fullz info = 40$)\r\n- Ireland = 25$ per 1 (fullz info = 40$)\r\n- Mexico = 20$ per 1 (fullz info = 35$)\r\n- Asia = 20$ per 1 (fullz info = 35$)\r\n\r\n_________i Only Exchange WU to PM , WU to WMZ_________\r\n\r\n- 100$ WU = 100$ PM\r\n- 200$ WU = 200$ PM\r\n- 100$ WU = 110$ WMZ\r\n- 200$ WU = 210$ WMZ\r\n\r\n________________ Do WU transfer ______________________\r\n\r\n- 700$ for MTCN 8000$\r\n- 550$ for MTCN 6000$\r\n- 400$ for MTCN 4000$\r\n- 200$ for MTCN 1500$\r\n\r\n__________ Bank Logins Prices US UK CA AU EU _____________\r\n\r\n. Bank Us : ( Bank of america,HALIFAX,BOA,CHASE,Wells Fargo...)\r\n. Balance 3000$ = 150$\r\n. Balance 5000$ = 250$\r\n. Balance 8000$ = 400$\r\n. Balance 12000$ = 600$\r\n. Balance 15000$ = 800$\r\n. Balance 20000$ = 1000$\r\n\r\n- Bank UK : ( LLOYDS TSB,BARCLAYS,Standard Chartered,HSBC...)\r\n. 
Balance 5000 GBP = 300$\r\n. Balance 12000 GBP = 600$\r\n. Balance 16000 GBP = 700$\r\n. Balance 20000 GBP = 1000$\r\n. Balance 30000 GBP = 1200$\r\n\r\n__________________ PayPal account _______________________\r\n\r\n= Account Paypal 1500$ = 200$\r\n= Account Paypal 2500$ = 250$\r\n= Account Paypal 4000$ = 350$\r\n= Account Paypal 7000$ = 550$\r\n\r\n_____________ Dumps track 1 track 2 with pin _____________\r\n\r\n- Dumps,Tracks 1&2 Us = 110$ per 1\r\n- Dumps,Tracks 1&2 Uk = 120$ per 1\r\n- Dumps,Tracks 1&2 Ca = 120$ per 1\r\n- Dumps,Tracks 1&2 Au = 120$ per 1\r\n- Dumps,Tracks 1&2 Eu = 150$ per 1\r\n\r\n-Sample Dump + Pin:\r\nTrack1 : B4096663104697113^FORANTO/CHRI STOPHER M^09061012735200521000000 ,\r\nTrack2 : 4096663104697113=0906101273525 21\r\nPin : 1783\r\n___________________________________________________________\r\n\r\n-WARRANTY time is 10 HOURS. Any cvv purchase over 10 hours can not warranty.\r\n-If you buy over 30 cvvs, i will sell for you best price.\r\n-I will discount for you if you are reseller or you order everyday many on the next day.\r\n-I will prove to you that I am the best sellers. 
And make sure you will enjoy doing business with me.\r\n-I accept Bitcoin ,WU (western union) , WMZ (webmoney) or MoneyGram...\r\n\r\nContact me\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n\r\ncarder007 register\r\ncarder007 login\r\ncarder007 cc\r\nbuy cvv\r\nbuy cc\r\nbuy bank logins\r\nbuy bank logs\r\nbuy dumps \r\nbuy dumps with pin" 148 | ## [1] "> colvals = c(\"character\",\"character\",rep(\"integer\",each=50))\r\n> HAM2 <- read.csv (file=\"HAM RNA AmpliSeq counts p2.csv\",header=F,skip=2,colClasses = colvals)\r\nError in scan(file = file, what = what, sep = sep, quote = quote, dec = dec, :\r\n scan() expected 'an integer', got 'SEC24B-AS1'\r\n> HAM2 <- read.csv (file=\"HAM RNA AmpliSeq counts p2.csv\",header=F,skip=2) \r\n> HAM2[1:4,1:4]\r\n V1 V2 V3 V4\r\n1 SEC24B-AS1 AMPL37741840 5 1\r\n2 A1BG AMPL17425613 0 0\r\n3 A1CF AMPL36593459 0 0\r\n4 GGACT AMPL17367653 2 3\r\n> colvals[1:4]\r\n[1] \"character\" \"character\" \"integer\" \"integer\"\r\n> print (\"WTF?\")\r\n[1] \"WTF?\"\r\n> dim(HAM2)\r\n[1] 20812 52\r\n> grep(\"SEC24B-AS1\",HAM2[,1])\r\n[1] 1\r\n>" 149 | ## [1] "> colvals = c(\"character\",\"character\",rep(\"integer\",each=50))\r\n> HAM2 <- read.csv (file=\"HAM RNA AmpliSeq counts p2.csv\",header=F,skip=2,colClaError in scan(file = file, what = what, sep = sep, quote = quote, dec = dec, :\r\n scan() expected 'an integer', got 'SEC24B-AS1'\r\n> HAM2 <- read.csv (file=\"HAM RNA AmpliSeq counts p2.csv\",header=F,skip=2) > HAM2[1:4,1:4]\r\n V1 V2 V3 V4\r\n1 SEC24B-AS1 AMPL37741840 5 1\r\n2 A1BG AMPL17425613 0 0\r\n3 A1CF AMPL36593459 0 0\r\n4 GGACT AMPL17367653 2 3\r\n> colvals[1:4]\r\n[1] \"character\" \"character\" \"integer\" \"integer\"\r\n> print (\"WTF?\")\r\n[1] \"WTF?\"\r\n> dim(HAM2)\r\n[1] 20812 52\r\n> 
grep(\"SEC24B-AS1\",HAM2[,1])\r\n[1] 1\r\n>" 150 | ## [1] "#-A8-#\r\n\r\ndf = data.frame(\"Schule\" = c(rep(\"S1\", 3), rep(\"S2\", 3), rep(\"S3\", 3)),\r\n\t\"Methode\" = rep(c(\"L1\", \"L2\", \"L3\"), 3), \"Score\" = c(56, 41, 23, 62,\r\n\t53, 29, 62, 71, 53))\r\n\r\ny.mean = mean(df$Score)\r\nalpha = NA\r\nfor(i in 1:3){ \r\n\talpha[i] = mean(df$Score[df$Schule == paste0(\"S\",i)]) - y.mean\r\n}\r\nbeta = NA\r\nfor(i in 1:3){ \r\n\tbeta[i] = mean(df$Score[df$Methode == paste0(\"L\",i)]) - y.mean\r\n}\r\n\r\nalpha.beta = NA\r\nfor(i in 1:3){\r\nalpha.beta[i] = df$Score[i] - alpha[i] - beta[1] - y.mean\r\n}\r\n\r\nfor(i in 1:3){\r\nalpha.beta[3+i] = df$Score[3+i] - alpha[i] - beta[2] - y.mean\r\n} \r\n\r\nfor(i in 1:3){\r\nalpha.beta[6+i] = df$Score[6+i] - alpha[i] - beta[3] - y.mean\r\n} \r\n\r\naov = (aov(Score ~ Error(Schule / Methode), df))\r\n\r\ndf$predicted = predict(lm(Score ~ Schule + Methode, df)) #ohne zufällige Effekte\r\ndf$pre.err = predict(lm(aov(Score ~ Error(Schule / Methode), df)))" 151 | ## [1] "info<-read.table('data.txt', header=TRUE)\r\n\r\nprint('table read stored as info')\r\nprint(summary(info))\r\n\r\n\r\nrawTimes<-info[1]\r\nprint (rawTimes[520:525, 1])\r\n\r\nts.cur <- rawTimes[1:length(rawTimes)-1, 1]\r\nts.next <- rawTimes[2:length(rawTimes), 1]\r\n\r\nanswer<-ts.cur - ts.next\r\n\r\nprint(answer)" 152 | ## [1] "# Spelling corrector in R\r\n# Claudio Sacchettini\r\n#\r\n# translated from\r\n# How to Write a Spelling Corrector (Peter Norvig)\r\n# http://norvig.com/spell-correct.html\r\n\r\n\r\nwords <- function(text) strsplit(tolower(text),'[^a-z]+')[[1]]\r\n\r\ntrain <- function(features) tapply(features, features, length)\r\n\r\ncon <- file(\"big.txt\", \"r\")\r\nNWORDS = train(words(readChar(con,10000000)))\r\nclose(con)\r\n\r\nalphabet = \"abcdefghijklmnopqrstuvwxyz\"\r\n\r\nedits1 <- function(word) {\r\n a <- vector()\r\n b <- vector()\r\n for (i in 0:nchar(word)) {a[i+1] <- substring(word,1,i)\r\n b[i+1] <- 
substring(word,i+1,nchar(word))}\r\n c <- unlist(strsplit(alphabet, NULL))\r\n deletes <- paste(a[b!=\"\"],substring(b[b!=\"\"],2), sep=\"\")\r\n transposes <- paste(a, substring(b[length(b)>1],2,2), substring(b[length(b)>1],1,1), substring(b[length(b)>1],3), sep=\"\")\r\n replaces <- paste(rep(a[b!=\"\"],each=nchar(alphabet)), rep(c,nchar(word)), rep(substring(b[b!=\"\"],2),each=nchar(alphabet)), sep=\"\")\r\n inserts <- paste(rep(a,each=nchar(alphabet)), rep(c,nchar(word)), rep(b,each=nchar(alphabet)), sep=\"\")\r\n return(unique(c(deletes, transposes, replaces, inserts)))\r\n }\r\n\r\nknown_edits2 <- function(word) {\r\n e2 <- vector()\r\n for (e1 in 1:length(edits1(word))) {\r\n e2 <- c(e2, edits1(edits1(word)[e1]))\r\n }\r\n return(unique(e2[e2 %in% names(NWORDS)]))\r\n }\r\n\r\nknown <- function(words) words[words %in% names(NWORDS)]\r\n\r\ncorrection <- function(word) {\r\n candidates <- if(length(known(word))>0) known(word) else (if(length(known(edits1(word)))>0) known(edits1(word)) else (if(length(known_edits2(word))>0) known_edits2(word) else word))\r\n return(if (length(candidates)==1 & candidates[1]==word) candidates else names(which.max(NWORDS[names(NWORDS) %in% candidates])))\r\n }" 153 | ## [1] "rm(list = ls())\r\nlibrary(dplyr)\r\nlibrary(reshape2)\r\n\r\n#--store results\r\nmat_arry <- array(dim = c(5000, 2, 8))\r\n\r\n#-litter icc\r\nlit_icc <- seq(0, 0.7, .1)\r\n\r\nfor(k in 1:length(lit_icc)) {\r\n for(i in 1:5000){\r\n icc_loop <- lit_icc[k]\r\n v_overall <- 10\r\n n_litters <- 8\r\n pups_litter <- 4\r\n v_litter <- icc_loop * v_overall\r\n v_error <- v_overall - v_litter\r\n litter <- rep(1:n_litters, each = pups_litter)\r\n # two treatments\r\n treat <- rep(0:1, each = pups_litter * n_litters / 2)\r\n treat <- factor(treat, labels = c('C', 'T'))\r\n # litter effect\r\n litter_eff <- rnorm(n_litters, 0, sqrt(v_litter))\r\n # residual\r\n residual <- rnorm(n_litters * pups_litter, 0, sqrt(v_error))\r\n # the outcome measure\r\n y <- 5 + 0 * 
(treat == 'T') + litter_eff[litter] + residual\r\n litter <- factor(paste0('l', litter))\r\n my_data <- data.frame(litter, treat, y)\r\n mat_arry[i, 1, k] <- lmerTest::rand(lmerTest::lmer(y ~ treat + (1|litter), \r\n data = my_data))$rand.table$p.value\r\n m_lm <- stats::lm(y ~ treat, data = my_data)\r\n mat_arry[i, 2, k] <- anova(m_lm)[1, 5]\r\n }\r\n}\r\n\r\nmt_df <- melt(mat_arry) \r\n\r\nmt_1 <- mt_df %>% filter(Var2 == 1)\r\nmt_2 <- mt_df %>% filter(Var2 == 2)\r\n\r\ntemp <- data.frame(icc = mt_1$Var3, u_0 = mt_1$value, \r\n p_lm = mt_2$value)\r\n\r\ndf_non <- temp %>% filter(u_0 < 0.05)\r\ndf_sig <- temp %>% filter(u_0 > 0.05)\r\n\r\n\r\nt1_non <- df_non %>% group_by(icc) %>% summarise(mean(p_lm < 0.05))\r\nt1_sig <- df_sig %>% group_by(icc) %>% summarise(mean(p_lm < 0.05))\r\n\r\nresults <- data.frame(cond = rep(c(\"sig\", \"non_sig\"), each = 8), \r\n icc = rep(seq(0, .7, .1), 2), \r\n t1 = c(t1_sig$`mean(p_lm < 0.05)`, \r\n t1_non$`mean(p_lm < 0.05)`))\r\n\r\nwrite.csv(results, \"conditional_t1_results.csv\")" 154 | ## [1] "# This script uses some of the R and JAGS code\r\n# that was provided by Scheibehenne et al. 
in their rejoinder\r\n# See osf.io/hjt65 to download the necessary files\r\nsource(\"bridge_sampling_functions.R\")\r\nsource(\"jags_functions.R\")\r\n\r\nsim_studies <- function(nstudies, mu, tau, v) {\r\n true_eff <- rnorm(nstudies, mu, tau)\r\n # use bootstrapping to sample sampling variances\r\n v <- sample(v, nstudies, replace = TRUE)\r\n y <- rnorm(nstudies, true_eff, sqrt(v))\r\n data.frame(y, v)\r\n}\r\n\r\nsim_models <- function(i, conds, metadata, nsim) {\r\n require(LaplacesDemon)\r\n require(truncnorm)\r\n require(R2jags)\r\n require(metafor)\r\n require(mvtnorm)\r\n require(Brobdingnag)\r\n message(\"Condition: \", i)\r\n results <- vector(\"list\", nsim)\r\n for (n in seq_len(nsim)) {\r\n # simulate data\r\n sim_data <- sim_studies(\r\n conds$nstudies[i], conds$mu[i], conds$tau[i], metadata$vi\r\n )\r\n # fixed effects models\r\n data_fixed <- list(\r\n y = sim_data$y, V = sim_data$v, Nstud = dim(sim_data)[1]\r\n )\r\n # fixed MA model: H0\r\n logml.fixed.H0 <- log.ml.fixed.H0(data_fixed)\r\n # fixed MA model: H1 (unrestricted)\r\n post.samples.fixed <- get.samples.fixed.H1.unrestricted(data_fixed)\r\n lb <- -Inf\r\n ub <- Inf\r\n names(lb) <- names(ub) <- \"d.fixed\"\r\n bs.fixed.H1.unrestricted <- bridge.sampler(\r\n post.samples = post.samples.fixed,\r\n log.posterior = log.posterior.fixed.H1.unrestricted,\r\n data = data_fixed, lb = lb, ub = ub\r\n )\r\n logml.fixed.H1.unrestricted <- bs.fixed.H1.unrestricted$logml\r\n # random effects models\r\n data_random <- list(\r\n y = sim_data$y, V = sim_data$v, \r\n Nstud = dim(sim_data)[1],\r\n prior.scaleTau = conds$scaleTau[i]\r\n )\r\n # random MA model: H0\r\n post.samples.random.H0 <- get.samples.random.H0(data_random)\r\n lb <- rep(-Inf, ncol(post.samples.random.H0))\r\n names(lb) <- colnames(post.samples.random.H0)\r\n lb[[ \"tau\" ]] <- 0\r\n ub <- rep(Inf, ncol(post.samples.random.H0))\r\n names(ub) <- colnames(post.samples.random.H0)\r\n bs.random.H0 <- bridge.sampler(\r\n post.samples = 
post.samples.random.H0,\r\n log.posterior = log.posterior.random.H0,\r\n data = data_random, lb = lb, ub = ub\r\n )\r\n logml.random.H0 <- bs.random.H0$logml\r\n # random MA model: H1 (unrestricted)\r\n post.samples.random <- get.samples.random.H1.unrestricted(data_random)\r\n lb <- rep(-Inf, ncol(post.samples.random))\r\n names(lb) <- colnames(post.samples.random)\r\n lb[[ \"tau\" ]] <- 0\r\n ub <- rep(Inf, ncol(post.samples.random))\r\n names(ub) <- colnames(post.samples.random)\r\n bs.random.H1.unrestricted <- bridge.sampler(\r\n post.samples = post.samples.random,\r\n log.posterior = log.posterior.random.H1.unrestricted,\r\n data = data_random, lb = lb, ub = ub\r\n )\r\n logml.random.H1.unrestricted <- bs.random.H1.unrestricted$logml\r\n \r\n # frequentist analysis\r\n rma_fit <- metafor::rma(y ~ 1, vi = v, data = sim_data)\r\n rma_fit_summary <- summary(rma_fit)\r\n \r\n # save results\r\n results[[n]] <- list(\r\n logOR_fixed = mean(post.samples.fixed[, \"d.fixed\"]),\r\n logOR_fixed_lower = quantile(post.samples.fixed[, \"d.fixed\"], 0.025),\r\n logOR_fixed_upper = quantile(post.samples.fixed[, \"d.fixed\"], 0.975),\r\n logOR_random = mean(post.samples.random[, \"d.rand\"]),\r\n logOR_random_lower = quantile(post.samples.random[, \"d.rand\"], 0.025),\r\n logOR_random_upper = quantile(post.samples.random[, \"d.rand\"], 0.975),\r\n tau = mean(post.samples.random[, \"tau\"]),\r\n tau_lower = quantile(post.samples.random[, \"tau\"], 0.025),\r\n tau_upper = quantile(post.samples.random[, \"tau\"], 0.975),\r\n BF_fixed_random = exp(\r\n logml.fixed.H1.unrestricted - logml.random.H1.unrestricted\r\n ),\r\n BF10_fixed = exp(logml.fixed.H1.unrestricted - logml.fixed.H0),\r\n BF10_random = exp(logml.random.H1.unrestricted - logml.random.H0),\r\n logOR_rma = rma_fit_summary$b[1, 1],\r\n logOR_rma_lower = rma_fit_summary$ci.lb[1],\r\n logOR_rma_upper = rma_fit_summary$ci.ub[1],\r\n p10_rma = rma_fit_summary$pval[1],\r\n tau_rma = sqrt(rma_fit_summary$tau2),\r\n Q_rma = 
rma_fit_summary$QE,\r\n Qp_rma = rma_fit_summary$QEp\r\n )\r\n }\r\n results <- do.call(rbind.data.frame, results)\r\n conds$results[[i]] <- rbind(conds$results[[i]], results)\r\n conds[i, , drop = FALSE]\r\n # save after every condition in order not to loose any trials\r\n # save(conds, file = \"sim_results.Rda\")\r\n}\r\n\r\n# -------- simulations -----------\r\nload(\"meta_data.Rda\")\r\n\r\n# set to FALSE to extend existing simulation results\r\nnew_sim <- FALSE\r\nif (new_sim) {\r\n conds <- expand.grid(\r\n mu = 0.25, \r\n tau = c(0, 0.05, 0.1, 0.20, 0.3, 0.4),\r\n nstudies = c(7, 14, 28),\r\n sdTau = c(1/64, 1/32, 1/16, 1/8, 1/4, 1/2, 1, 10, 100)\r\n )\r\n conds$scaleTau <- 1 / conds$sdTau^2\r\n conds$results <- vector(\"list\", nrow(conds))\r\n} else {\r\n conds <- load(\"sim_results.Rda\")\r\n}\r\n\r\nnsim <- 1\r\n\r\nlibrary(doParallel)\r\ncl <- makeCluster(2)\r\nregisterDoParallel(cl)\r\nconds_new <- foreach(i = 1:2, .combine = rbind) %dopar% {\r\n sim_models(i, conds = conds, metadata = metadata, nsim = nsim) \r\n}" 155 | ## [1] "breaks_cuts<-function(l,breaks){\r\n brks<-cut(l$value,breaks=breaks,include.lowest=TRUE)\r\n return(brks)\r\n}\r\n\r\n\r\ngen_plot<-function(arr,brks,lons,lats,season,var){\r\n\r\nlongdata<-melt(arr)\r\nlongdata$brks<-breaks_cuts(longdata,brks)\r\nlongdata_n<-longdata[(longdata[,2]<=nh_bound_lower & longdata[,2]>=nh_bound_upper),]\r\nlongdata_s<-longdata[(longdata[,2]<=sh_bound_lower & longdata[,2]>=sh_bound_upper),]\r\nlev_order<-levels(as.factor(longdata$brks))\r\n\r\ncontour_cols<-colorRampPalette(c(\"yellow\",\"green\",\"blue\",\"purple\",\"red\"))(length(lev_order))\r\nprint(lev_order)\r\nprint(contour_cols)\r\n m<-map_data(\"world2\")\r\n gplot<-ggplot()+ \r\n coord_cartesian(xlim=c(min(lons),max(lons)),\r\n ylim=c(min(lats),max(lats))) +\r\n geom_map(data= m, map = m, aes(map_id=region)) +\r\n stat_contour(aes(x=lons[longdata_n[,1]],\r\n y=lats[longdata_n[,2]],\r\n z = longdata_n[,3]),breaks=brks)+\r\n 
stat_contour(aes(x=lons[longdata_s[,1]],\r\n y=lats[longdata_s[,2]],\r\n z = longdata_s[,3]),breaks=brks) +\r\n \r\n geom_tile(data=longdata_n,aes(x=lons[longdata_n[,1]],\r\n y=lats[longdata_n[,2]],fill=brks),alpha=0.5) +\r\n geom_tile(data=longdata_s,aes(x=lons[longdata_s[,1]],\r\n y=lats[longdata_s[,2]],fill=brks),alpha=0.5) +\r\n scale_fill_manual(breaks=lev_order,values=contour_cols) +\r\n\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[2])],\r\n xmax = lons[which(lons==RIGHT_BOUND[2])],\r\n ymin = lats[which(lats==MIN_LAT[2])],\r\n ymax = lats[which(lats==MAX_LAT[2])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[3])],\r\n xmax = lons[which(lons==RIGHT_BOUND[3])],\r\n ymin = lats[which(lats==MIN_LAT[3])],\r\n ymax = lats[which(lats==MAX_LAT[3])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[5])],\r\n xmax = lons[which(lons==RIGHT_BOUND[5])],\r\n ymin = lats[which(lats==MIN_LAT[5])],\r\n ymax = lats[which(lats==MAX_LAT[5])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[6])],\r\n xmax = lons[which(lons==RIGHT_BOUND[6])],\r\n ymin = lats[which(lats==MIN_LAT[6])],\r\n ymax = lats[which(lats==MAX_LAT[6])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n\r\n\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[1])],\r\n xmax = 360,\r\n ymin = lats[which(lats==MIN_LAT[1])],\r\n ymax = lats[which(lats==MAX_LAT[1])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n geom_rect(aes(xmin = 0,\r\n xmax = lons[which(lons==RIGHT_BOUND[1])],\r\n ymin = lats[which(lats==MIN_LAT[1])],\r\n ymax = lats[which(lats==MAX_LAT[1])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n geom_rect(aes(xmin = lons[which(lons==LEFT_BOUND[4])],\r\n xmax = 360,\r\n ymin = lats[which(lats==MIN_LAT[4])],\r\n ymax = 
lats[which(lats==MAX_LAT[4])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n geom_rect(aes(xmin = 0,\r\n xmax = lons[which(lons==RIGHT_BOUND[4])],\r\n ymin = lats[which(lats==MIN_LAT[4])],\r\n ymax = lats[which(lats==MAX_LAT[4])]),\r\n fill = \"transparent\", color = \"red\", size = 1.5) +\r\n\r\n\r\n ggtitle(sprintf(\"%s Threshold for %s\",var,season)) +\r\n labs(x=\"Longitude\",y=\"Latitude\") \r\nreturn(gplot)\r\n}" 156 | ## [1] "Error in `rownames<-`(`*tmp*`, value = c(\"SAA3P 11p15.1 \", \"MIR494 14q32.31 \", : \r\n attempt to set 'rownames' on an object with no dimensions\r\nCalls: REVEALER.v1 ... .local -> silhouette -> silhouette.NMF -> rownames<-\r\nIn addition: There were 50 or more warnings (use warnings() to see the first 50)\r\nExecution halted" 157 | ## [1] " mf <- model.frame(formula)\r\n }\r\n else {\r\n mf <- model.frame(formula, data)\r\n }\r\n cl <- match.call()\r\n xy <- split(model.extract(mf, \"response\"), mf[, 2])\r\n faclevels <- names(xy)\r\n x <- xy[[1]]\r\n y <- xy[[2]]\r\n if (tr == 0.5) \r\n warning(\"Comparing medians should not be done with this function!\")\r\n alpha <- 0.05\r\n if (is.null(y)) {\r\n if (is.matrix(x) || is.data.frame(x)) {\r\n y = x[, 2]\r\n x = x[, 1]\r\n }\r\n if (is.list(x)) {\r\n y = x[[2]]\r\n x = x[[1]]\r\n }\r\n }\r\n if (tr > 0.25) \r\n print(\"Warning: with tr>.25 type I error control might be poor\")\r\n x <- x[!is.na(x)]\r\n y <- y[!is.na(y)]\r\n h1 <- length(x) - 2 * floor(tr * length(x))\r\n h2 <- length(y) - 2 * floor(tr * length(y))\r\n q1 <- (length(x) - 1) * winvar(x, tr)/(h1 * (h1 - 1))\r\n q2 <- (length(y) - 1) * winvar(y, tr)/(h2 * (h2 - 1))\r\n df <- (q1 + q2)^2/((q1^2/(h1 - 1)) + (q2^2/(h2 - 1)))\r\n crit <- qt(1 - alpha/2, df)\r\n dif <- mean(x, tr) - mean(y, tr)\r\n low <- dif - crit * sqrt(q1 + q2)\r\n up <- dif + crit * sqrt(q1 + q2)\r\n test <- abs(dif/sqrt(q1 + q2))\r\n yuen <- 2 * (1 - pt(test, df))\r\n result <- list(test = test, conf.int = c(low, up), p.value = yuen, 
\r\n df = df, diff = dif, call = cl)\r\n class(result) <- \"yuen\"\r\n result\r\n}" 158 | ## [1] "rm(list=ls()) \r\n# Remove pre-existing objects\r\n\r\n#' Extended Euclidean Algorithm\r\n#' Computes d=gcd(u,v) and a,b that satisfy \r\n#' a*u+b*v=d \r\n#' \r\n#' @param u,v: Two integers, with u>v\r\n#' @return A list with a,b,d, such that au+bv=d\r\ngcd_E = function(u,v){\r\n m = matrix(c(1,0,0,1),nrow=2) # m = |1 0|\r\n n = 0 # |0 1|\r\n \r\n while(v != 0){\r\n q = floor(u/v) # Get u/v, less the remainder\r\n m = m %*% matrix(c(q,1,1,0),nrow=2,byrow=T) # m = m * |q 1|\r\n temp = v # |1 0|\r\n v = u - q*v # (u,v)=(v,u-q*v)\r\n u = temp\r\n n = n+1\r\n }\r\n \r\n return( list(d=u, a=(-1)^n*m[2,2], b=(-1)^(n+1)*m[1,2]) )\r\n}" 159 | ## [1] "require(dplyr)\r\nrequire(readr)\r\nrequire(ggplot2)\r\nrequire(gridExtra)\r\nrequire(scales)\r\n\r\nCorp = read_csv('OECD_data/Seperated/CorpComb.csv')\r\nGaS = read_csv('OECD_data/Seperated/GaSComb.csv')\r\nPayroll = read_csv('OECD_data/Seperated/PayrollComb.csv')\r\nPersInc = read_csv('OECD_data/Seperated/PersIncComb.csv')\r\nProperty = read_csv('OECD_data/Seperated/PropertyComb.csv')\r\nSocSec = read_csv('OECD_data/Seperated/SocSecComb.csv')\r\nTaxRev = read_csv('OECD_data/Seperated/TaxRevCom.csv')\r\nWedge = read_csv('OECD_data/Seperated/Wedge.csv')\r\n\r\nCorp_avgs <- Corp %>% group_by(Country) %>% summarise(CorpGPDmean=mean(`Corp%GDP`),CorpTaxMean = mean(`Corp%Tax`))\r\nGaS_avgs <- GaS %>% group_by(Country) %>% summarise(GaSGPDmean=mean(`GaS%GDP`),GasTaxMean = mean(`GaS%Tax`))\r\nPayroll_avgs <- Payroll %>% group_by(Country) %>% summarise(PayrollGPDmean=mean(`Payroll%GDP`),PayrollTaxMean = mean(`Payroll%Tax`))\r\nPersInc_avgs <- PersInc %>% group_by(Country) %>% summarise(PersIncGPDmean=mean(`PersInc%GDP`),PersIncTaxMean = mean(`PersInc%Tax`))\r\nProperty_avgs <- Property %>% group_by(Country) %>% summarise(PropGPDmean=mean(`Prop%GDP`),PropTaxMean = mean(`Prop%Tax`))\r\nSocSec_avgs <- SocSec %>% group_by(Country) %>% 
summarise(SocSecGPDmean=mean(`SocSec%GDP`),SocSecTaxMean = mean(`SocSec%Tax`))\r\nTaxRev_avgs <- TaxRev %>% group_by(Country) %>% summarise(TaxRevGPDmean=mean(`TaxRev%GDP`),TaxRevPerCapMean = mean(`TaxRevPerCap`))\r\nWedge_avgs <- Wedge %>% group_by(Country) %>% summarise(PercentLaborCostMean=mean(Wedge))\r\n\r\nall_avgs <- inner_join(Corp_avgs,GaS_avgs)\r\nall_avgs <- inner_join(all_avgs,Payroll_avgs)\r\nall_avgs <- inner_join(all_avgs,PersInc_avgs)\r\nall_avgs <- inner_join(all_avgs,Property_avgs)\r\nall_avgs <- inner_join(all_avgs,SocSec_avgs)\r\nall_avgs <- inner_join(all_avgs,TaxRev_avgs)\r\nall_avgs <- inner_join(all_avgs,Wedge_avgs)\r\n\r\nwrite_excel_csv(all_avgs,\"all_averages_by_loc.csv\")\r\n# Did some Excel Tweaking\r\nnew_all_avgs <- read_csv('all_averages_by_loc.csv')\r\n\r\nCorpGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$CorpGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nCorpTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$CorpTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nCorpPair <-grid.arrange(CorpGDPgg, CorpTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Corporate Taxation (2013 - 2015 Averages)\")\r\nggsave(\"CorpPair.png\", CorpPair, width = 8, height = 4)\r\n\r\nGaSGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$GaSGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nGaSTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$GaSTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nGaSPair <-grid.arrange(GaSGDPgg, GaSTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Goods And Services Taxation (2013 - 
2015 Averages)\")\r\nggsave(\"GaSPair.png\", GaSPair, width = 8, height = 4)\r\n\r\nPayrollGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PayrollGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nPayrollTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PayrollTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nPayrollPair <-grid.arrange(PayrollGDPgg, PayrollTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Payroll Taxation (2013 - 2015 Averages)\")\r\nggsave(\"PayrollPair.png\", PayrollPair, width = 8, height = 4)\r\n\r\nPersIncGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PersIncGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nPersIncTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PersIncTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nPersIncPair <-grid.arrange(PersIncGDPgg, PersIncTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Personal Income Taxation (2013 - 2015 Averages)\")\r\nggsave(\"PersIncPair.png\", PersIncPair, width = 8, height = 4)\r\n\r\nPropGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PropGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nPropTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PropTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nPropPair <-grid.arrange(PropGDPgg, PropTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Property Taxation (2013 - 2015 
Averages)\")\r\nggsave(\"PropPair.png\", PropPair, width = 8, height = 4)\r\n\r\nSocSecGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$SocSecGPDmean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nSocSecTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$SocSecTaxMean,y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of Total Taxation', y = \"\")\r\n\r\nSocSecPair <-grid.arrange(SocSecGDPgg, SocSecTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Social Security Taxation (2013 - 2015 Averages)\")\r\nggsave(\"SocSecPair.png\", SocSecPair, width = 8, height = 4)\r\n\r\nTaxRevGDPgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$TaxRevGPDmean, y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = 'as % of GDP', y = \"\")\r\nTaxRevTaxgg <- ggplot(new_all_avgs, aes(x = new_all_avgs$TaxRevPerCapMean, y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n scale_x_continuous(labels = comma) +\r\n labs(x = 'USD Per Capita', y = \"\") \r\n\r\nTaxRevPair <-grid.arrange(TaxRevGDPgg, TaxRevTaxgg, ncol = 2, left=\"Happiness Index\", top = \"Total Tax Revenue (2013 - 2015 Averages)\")\r\nggsave(\"TaxRevPair.png\", TaxRevPair, width = 8, height = 4)\r\n\r\nWedgegg <- ggplot(new_all_avgs, aes(x = new_all_avgs$PercentLaborCostMean, y = new_all_avgs$`Avg Happiness Index`)) + \r\n geom_text(aes(label=new_all_avgs$Country), size = 2) +\r\n labs(x = '% of Labor Cost', y = \"\", title = \"Wedge\")\r\n\r\nggsave(\"Wedge.png\", Wedgegg, width = 4, height = 4)" 160 | ## [1] "# Problem: Suppose a production process produces widgets with a weight distributed\r\n# as a normal variable with mean of 100 grams and standard deviation of 10 grams.\r\n# What is the probability of a random sample of size 25 having a mean 
value that is \r\n# outside 100 +- 2 grams?\r\n\r\ncord.x <- c(-3, seq(-3, -2*(5)/10, 0.01), -2*(5)/10)\r\ncord.y <- c(0, dnorm(seq(-3, -2*(5)/10, 0.01),0,1), 0)\r\ncord.xx <- c(2*(5)/10, seq(2*5/10, +3, 0.01), +3)\r\ncord.yy <- c(0, dnorm(seq(2*5/10, +3, 0.01),0,1), 0)\r\ncurve(dnorm(x,0,1),xlim=c(-3,3),main=\"Standard Normal Density\", ylab = \"density\")\r\npolygon(cord.x,cord.y,col=\"skyblue\")\r\npolygon(cord.xx,cord.yy,col=\"skyblue\")" 161 | ## [1] "library(rstan)\r\nlibrary(HDInterval)\r\n\r\ncomp_mod <- 'data {\r\n int k; //studies\r\n real y[k]; // effects for k studies\r\n real sigma[k]; // s.e. of effect estimates \r\n}\r\nparameters {\r\n real mu; \r\n real tau;\r\n real eta[k];\r\n}\r\ntransformed parameters {\r\n real theta[k];\r\n for (i in 1:k)\r\n theta[i] = mu + tau * eta[i];\r\n}\r\nmodel {\r\n tau ~ cauchy(0, 0.2);\r\n mu ~ normal(0, 0.5);\r\n target += normal_lpdf(eta | 0, 1);\r\n target += normal_lpdf(y | theta, sigma);\r\n} '\r\n\r\nk <- 5\r\nmat <- matrix(nrow = 100)\r\n\r\nfor(i in 1:100){\r\nd_truth <- rnorm(k, mean=0, sd= 0.1)\r\nstudy_se <- runif(k, min = 0.05, max = 0.35)\r\nk_theta <- rnorm(k, mean=d_truth, sd=study_se)\r\n\r\nm_bayes <- sampling(object = comp_mod, \r\n data = list(k = length(1:k), \r\n y = k_theta,\r\n sigma = study_se), \r\n control = list(adapt_delta = 0.95), \r\n iter = 2000, warmup=1000, thin=1, chains = 2)\r\ntemp <- rstan::extract(m_bayes, par = c(\"tau\", \"mu\"))\r\nmat[i,1] <- hdi(temp$tau)[[1]]\r\n}\r\nsum(mat[,1] > 0)" 162 | ## [1] "library(ffbase)\r\nlibrary(ffbase2)\r\nlibrary(dplyr)\r\nlibrary(ETLUtils)\r\n\r\nsetwd('/Users/chun/Documents/R')\r\n\r\nx <- read.csv.ffdf(file = \"2010_BSA_Carrier_PUF.csv\", \r\n colClasses = c(\"integer\",\"integer\",\"factor\",\"factor\",\"factor\",\"integer\",\"integer\",\"factor\",\"integer\",\"integer\",\"integer\"), \r\n transFUN=function(x){\r\n names(x) <- recoder(names(x), \r\n from = c(\"BENE_SEX_IDENT_CD\", \"BENE_AGE_CAT_CD\", \"CAR_LINE_ICD9_DGNS_CD\", 
\"CAR_LINE_HCPCS_CD\",\r\n \"CAR_LINE_BETOS_CD\", \"CAR_LINE_SRVC_CNT\", \"CAR_LINE_PRVDR_TYPE_CD\", \"CAR_LINE_CMS_TYPE_SRVC_CD\",\r\n \"CAR_LINE_PLACE_OF_SRVC_CD\", \"CAR_HCPS_PMT_AMT\", \"CAR_LINE_CNT\"), \r\n to = c(\"sex\", \"age\", \"diagnose\", \"healthcare.procedure\",\r\n \"typeofservice\", \"service.count\", \"provider.type\", \"servicesprocessed\",\r\n \"place.served\", \"payment\", \"carrierline.count\"))\r\n x$sex <- factor(recoder(x$sex, from = c(1,2), to=c(\"Male\",\"Female\"))) \r\n x$age <- factor(recoder(x$age, from = c(1,2,3,4,5,6), to=c(\"Under 65\", \"65-69\", \"70-74\", \"75-79\", \"80-84\", \"85 and older\")))\r\n x$place.served <- factor(recoder(x$place.served, \r\n from = c(0, 1, 11, 12, 21, 22, 23, 24, 31, 32, 33, 34, 41, \r\n 42, 50, 51, 52, 53, 54, 56, 60, 61, 62, 65, 71, 72, \r\n 81, 99), \r\n to = c(\"Invalid Place of Service Code\", \"Office (pre 1992)\",\r\n \"Office\",\"Home\",\"Inpatient hospital\",\"Outpatient hospital\",\r\n \"Emergency room - hospital\",\"Ambulatory surgical center\",\"Skilled nursing facility\",\r\n \"Nursing facility\",\"Custodial care facility\",\"Hospice\",\"Ambulance - land\",\"Ambulance - air or water\",\r\n \"Federally qualified health centers\",\r\n \"Inpatient psychiatrice facility\", \"Psychiatric facility partial hospitalization\", \r\n \"Community mental health center\", \"Intermediate care facility/mentally retarded\", \r\n \"Psychiatric residential treatment center\", \"Mass immunizations center\", \r\n \"Comprehensive inpatient rehabilitation facility\", \r\n \"End stage renal disease treatment facility\",\r\n \"State or local public health clinic\",\"Independent laboratory\", \"Other unlisted facility\")))\r\n x\r\n }, VERBOSE=TRUE)\r\n\r\ndoby <- list()\r\nx %>%\r\n group_by(sex) %>%\r\n summarise(count=n(), sum=sum(payment), mean=mean(payment)) -> doby$sex" 163 | ## [1] "*********** Welcome to Carder007 ************\r\n\r\nHI ALL NEW CLIENT\r\nIM BIG HACKER GOOD\r\n- I'm is Professional 
seller,more than 6 years experience,i have sold cvv credit card to many customers all over the world.\r\n- Selling cvv, fullz many country as: Canada,USA,Australia,UK...all And many country in Europe: Fr,Ger,Spain,Ita...\r\n- I hope we will work together for a long time.\r\n- Always sell cvv quality with high balance.\r\n- I have a website but if you want buy cvv good price please contact me.\r\n- carder007 register, carder007 login, carder007 cc, buy cvv, buy cc, buy bank login, buy bank logs, buy dumps, buy dumps with pin, carder007 registration, carder007 all you need, carder007 cvv\r\n\r\n\r\nContact me:\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n \r\n \r\n \r\n___________________ CCV !! CCN ______________________\r\n\r\nList cc and my price..\r\n\r\n- Us (Visa,Master) = 10$ per 1\r\n- Us (Amex,Dis) = 12$ per 1\r\n- Us Bin 15$ , US Dob 25$\r\n- Us fullz info = 30$ per 1\r\n--------------------------------\r\n- Uk (Visa,Master) = 15$ per 1\r\n- Uk (Amex,Dis) = 20$ per 1\r\n- Uk Bin 20$ , UK Dob 25$\r\n- Uk fullz info = 35$ per 1\r\n--------------------------------\r\n- Ca (Visa,Master) = 20$ per 1\r\n- Ca (Amex,Dis) = 25$ per 1\r\n- Ca Bin 20$ , CA Dob 25$\r\n- Ca fullz info = 35$ per 1\r\n--------------------------------\r\n- Au (Visa,Master) = 20$ per 1\r\n- Au (Amex,Dis) = 25$ per 1\r\n- Au Bin 23$ , AU Dob 30$\r\n- Au fullz info = 30$ per 1\r\n--------------------------------\r\n- Eu (Visa,Master) = 25$ per 1\r\n- Eu (Amex,Dis) = 28$ per 1\r\n- Eu Bin 30$ , AU Dob 35$\r\n- Eu fullz info = 45$ per 1\r\n--------------------------------\r\n- RDP = 25$\r\n- SMTP = 30$ ( All Country )\r\n- Italy = 25$ per 1 (fullz info = 40$)\r\n- Spain = 25$ per 1 (fullz info = 40$)\r\n- Denmark = 30$ per1 (fullz info = 40$)\r\n- Sweden = 25$ per 1 (fullz info = 
40$)\r\n- France = 25$ per 1 (fullz info = 40$)\r\n- Germany = 25$ per 1 (fullz info = 40$)\r\n- Ireland = 25$ per 1 (fullz info = 40$)\r\n- Mexico = 20$ per 1 (fullz info = 35$)\r\n- Asia = 20$ per 1 (fullz info = 35$)\r\n\r\n_________i Only Exchange WU to PM , WU to WMZ_________\r\n\r\n- 100$ WU = 100$ PM\r\n- 200$ WU = 200$ PM\r\n- 100$ WU = 110$ WMZ\r\n- 200$ WU = 210$ WMZ\r\n\r\n________________ Do WU transfer ______________________\r\n\r\n- 700$ for MTCN 8000$\r\n- 550$ for MTCN 6000$\r\n- 400$ for MTCN 4000$\r\n- 200$ for MTCN 1500$\r\n\r\n__________ Bank Logins Prices US UK CA AU EU _____________\r\n\r\n. Bank Us : ( Bank of america,HALIFAX,BOA,CHASE,Wells Fargo...)\r\n. Balance 3000$ = 150$\r\n. Balance 5000$ = 250$\r\n. Balance 8000$ = 400$\r\n. Balance 12000$ = 600$\r\n. Balance 15000$ = 800$\r\n. Balance 20000$ = 1000$\r\n\r\n- Bank UK : ( LLOYDS TSB,BARCLAYS,Standard Chartered,HSBC...)\r\n. Balance 5000 GBP = 300$\r\n. Balance 12000 GBP = 600$\r\n. Balance 16000 GBP = 700$\r\n. Balance 20000 GBP = 1000$\r\n. Balance 30000 GBP = 1200$\r\n\r\n__________________ PayPal account _______________________\r\n\r\n= Account Paypal 1500$ = 200$\r\n= Account Paypal 2500$ = 250$\r\n= Account Paypal 4000$ = 350$\r\n= Account Paypal 7000$ = 550$\r\n\r\n_____________ Dumps track 1 track 2 with pin _____________\r\n\r\n- Dumps,Tracks 1&2 Us = 110$ per 1\r\n- Dumps,Tracks 1&2 Uk = 120$ per 1\r\n- Dumps,Tracks 1&2 Ca = 120$ per 1\r\n- Dumps,Tracks 1&2 Au = 120$ per 1\r\n- Dumps,Tracks 1&2 Eu = 150$ per 1\r\n\r\n-Sample Dump + Pin:\r\nTrack1 : B4096663104697113^FORANTO/CHRI STOPHER M^09061012735200521000000 ,\r\nTrack2 : 4096663104697113=0906101273525 21\r\nPin : 1783\r\n___________________________________________________________\r\n\r\n-WARRANTY time is 10 HOURS. 
Any cvv purchase over 10 hours can not warranty.\r\n-If you buy over 30 cvvs, i will sell for you best price.\r\n-I will discount for you if you are reseller or you order everyday many on the next day.\r\n-I will prove to you that I am the best sellers. And make sure you will enjoy doing business with me.\r\n-I accept Bitcoin ,WU (western union) , WMZ (webmoney) or MoneyGram...\r\n\r\nContact me\r\nSupport manager shop (English)\r\nYahoo: carder007 .ru\r\nICQ : 674586348\r\nEmail : carder007.ru@yahoo.com\r\nWebsite : http://Carder007s.com\r\n http://Carder007.surf\r\n http://Carder007.info\r\n http://Carder007u.us\r\n http://carder0077.tv\r\n\r\n\r\ncarder007 register\r\ncarder007 login\r\ncarder007 cc\r\nbuy cvv\r\nbuy cc\r\nbuy bank logins\r\nbuy bank logs\r\nbuy dumps \r\nbuy dumps with pin" 164 | ## [1] "for(i in 1:nflocks){\r\n rddays <- ddays + days(i) - days(1)\r\n a <- print(rddays)\r\n}" 165 | ## [1] "crearsimplex<-function(M){\r\n tam<-dim(M)\r\n v <- c()\r\n v2 <- c()\r\n my.lp <- make.lp(tam[1],tam[2])\r\n for(i in 1:tam[2]){\r\n set.column(my.lp, i, M[,i])\r\n v[i] <- 1\r\n }\r\n for(i in 1:tam[1]){\r\n v2[i] <- 1\r\n }\r\n set.objfn(my.lp,v)\r\n set.constr.type(my.lp, rep(\">=\",tam[1]))\r\n set.rhs(my.lp, v2)\r\n my.lp\r\n x<-solve(my.lp)\r\n y<-get.objective(my.lp)\r\n z<-get.variables(my.lp)\r\n salida <- c(x,y,z)\r\n return(salida)\r\n}\r\n\r\naux <- 0\r\nv_ma <- c(1,1,1,0,0,0,\r\n 1,1,0,1,0,0,\r\n 1,0,1,1,1,0,\r\n 0,1,1,1,0,1,\r\n 0,0,1,0,1,1,\r\n 0,0,0,1,1,1)\r\nM <- matrix(v_ma,nrow=6+aux,ncol=6,byrow = TRUE)\r\ntam <- dim(M)\r\nsalida <- crearsimplex(M)\r\nverificador <- 1\r\n\r\nif(salida[1]==0){\r\n mayor <- 0\r\n posant<- pos\r\n pos <- 0\r\n \r\n for(i in 3:length(salida)){\r\n if(salida[i]!=0 & salida[i]!=1){\r\n verificador <- verificador * 0\r\n }\r\n }\r\n \r\n if(verificador == 1){\r\n for(i in 3:length(salida)){\r\n if(salida[i]>mayor){\r\n mayor<-salida[i]\r\n pos<- i\r\n }\r\n }\r\n }\r\n \r\n if(agregar==TRUE){\r\n n_res<-rep(0, 
tam(2))\r\n n_res[pos-2]<-1\r\n M <- rbind(M,n_res)\r\n }else{\r\n M <- M[-\"n_res\",]\r\n }\r\n \r\n\r\n salida <- crearsimplex(M)\r\n \r\n}else{\r\n print(\"No tiene solución factible\")\r\n}" 166 | ## [1] "setwd(\"/Users/gemenenarcis/Documents/MATLAB/Football-data-challenge/R/\")\r\nlibrary(gbm)\r\ntraincsv <- read.csv(\"../trainSet/train.csv\",header = TRUE,sep=\",\")\r\ntestcsv <- read.csv(\"../testSet/test.csv\",header = TRUE,sep=\",\")\r\ndates <- as.Date(traincsv$Date, \"%Y-%m-%d\")\r\nyears <- as.numeric(format(dates, \"%Y\"))\r\nmonths <- format(dates, \"%d\")\r\nuniqueYears <- sort.int(unique(years))\r\nindexes <- matrix(0,2,length(uniqueYears) - 1)\r\nfor (i in 1:(length(uniqueYears) - 1))\r\n{\r\n year <- uniqueYears[i]\r\n nextYear <- uniqueYears[i+1]\r\n indx <- as.numeric(format(dates, \"%Y\")) == year & as.numeric(format(dates, \"%m\")) >= 8\r\n indx <- indx | (as.numeric(format(dates, \"%Y\")) == nextYear & as.numeric(format(dates, \"%m\")) <= 6)\r\n indx <- which(indx == TRUE)\r\n \r\n #attention!!the dates in train have to be sorted\r\n if(length(indx) > 0)\r\n {\r\n indexes[1,i] <- min(indx);\r\n indexes[2,i] <- max(indx);\r\n stopifnot(length(indx) == indexes[2,i] - indexes[1,i] + 1)\r\n }\r\n}\r\nmatches <- matrix(0, length(traincsv$ID), 2);\r\nwinners <- matrix(0, length(traincsv$ID), 1);\r\n\r\nfor (i in 1:length(traincsv$ID))\r\n{\r\n array <- unlist(train[i,], use.names = FALSE)\r\n array <- array[!is.na(array)]\r\n matches[i,] <- array[3:4]\r\n winners[i] <- array[5]\r\n if(winners[i] == 3)\r\n winners[i] <- 1\r\n else if(winners[i] == 1)\r\n winners[i] <- 3\r\n}\r\n#data.frame(\"Actual\" = train$HomeTeam, \r\n # \"PredictedProbability\" = train$AwayTeam)\r\n\r\nLogLossBinary = function(actual, predicted, eps = 1e-15) { \r\n predicted = pmin(pmax(predicted, eps), 1-eps) \r\n - (sum(actual * log(predicted) + (1 - actual) * log(1 - predicted))) / length(actual)\r\n}\r\n\r\nfor (i in 1:1)#ncol(indexes))\r\n{\r\n dataSubsetProportion = 
.2;\r\n rows = indexes[1,i]:indexes[2,i]\r\n trainingNonHoldoutSet = traincsv[!(1:nrow(traincsv) %in% rows), 3:4];#to train\r\n print(nrow(trainingHoldoutSet))\r\n print(nrow(trainingNonHoldoutSet))\r\n \r\n \r\n gbmWithCrossValidation = gbm(formula = traincsv$FTR[!(1:nrow(traincsv) %in% rows)] ~ .,\r\n distribution = \"multinomial\",\r\n data = trainingNonHoldoutSet,\r\n n.trees = 2000,\r\n shrinkage = .1,\r\n n.minobsinnode = 200, \r\n cv.folds = 5,\r\n n.cores = 1)\r\n bestTreeForPrediction = gbm.perf(gbmWithCrossValidation)\r\n \r\n gbmHoldoutPredictions = predict(object = gbmWithCrossValidation,\r\n newdata = trainingHoldoutSet,\r\n n.trees = bestTreeForPrediction,\r\n type = \"response\")\r\n \r\n gbmNonHoldoutPredictions = predict(object = gbmWithCrossValidation,\r\n newdata = trainingNonHoldoutSet,\r\n n.trees = bestTreeForPrediction,\r\n type = \"response\")\r\n print(paste(LogLossBinary(train$Response[randomRows], gbmHoldoutPredictions), \r\n \"Holdout Log Loss\"))\r\n print(paste(LogLossBinary(train$Response[!(1:nrow(train) %in% randomRows)], gbmNonHoldoutPredictions), \r\n \"Non-Holdout Log Loss\"))\r\n}\r\n\r\n\r\n#dataSubsetProportion = .2;\r\n#randomRows = sample(1:nrow(train), floor(nrow(train) * dataSubsetProportion));#\r\n#trainingHoldoutSet = train[randomRows, ];#to test\r\n#trainingNonHoldoutSet = train[!(1:nrow(train) %in% randomRows), ];#to train\r\n\r\n#gbmWithCrossValidation = gbm(formula = Response ~ .,\r\n# distribution = \"bernoulli\",\r\n# data = trainingNonHoldoutSet,\r\n# n.trees = 2000,\r\n# shrinkage = .1,\r\n# n.minobsinnode = 200, \r\n# cv.folds = 5,\r\n# n.cores = 1)\r\n\r\n#best TreeForPrediction = gbm.perf(gbmWithCrossValidation)\r\n#\"%y-%d-%d\"\r\n#for(i in 1:length(train$HomeTeam))\r\n#{\r\n #array = unlist(train[i,], use.names = FALSE);\r\n #array = array[!is.na(array)];\r\n #print(array);\r\n#}" 167 | ## [1] "obj <- function(x, g, np){\r\n mean((tapply(g, x, sum) - sum(g) / np)^2)\r\n}\r\n\r\ngg <- sample(1:30, 2e3, 
TRUE)\r\nnumPack <- 20\r\n\r\nidx <- matrix(NA, numPack, length(gg) / numPack)\r\nfor (i in 1:ncol(idx)) {\r\n if (i %% 2 == 0) {\r\n idx[ , i] <- numPack:1\r\n } else {\r\n idx[ , i] <- 1:numPack\r\n }\r\n}\r\n\r\nobj(as.vector(idx), sort(gg), numPack) # 10432.6\r\n\r\ntapply(sort(gg), as.vector(idx), sum)\r\n# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 \r\n# 1532 1532 1533 1533 1532 1535 1536 1533 1533 1534 1536 1535 1535 1532 1532 1532 1532 1531 1531 1531 \r\nsum(gg) / numPack # 1533" 168 | ## [1] "obj <- function(x, g, np){\r\n mean((tapply(g, x, sum) - sum(g) / np)^2)\r\n}\r\n\r\ngg <- sample(1:30, 2e3, TRUE)\r\nnumPack <- 20\r\n\r\nidx <- matrix(NA, numPack, length(gg) / numPack)\r\nfor (i in 1:ncol(idx)) {\r\n if (i %% 2 == 0) {\r\n idx[ , i] <- numPack:1\r\n } else {\r\n idx[ , i] <- 1:numPack\r\n }\r\n}\r\n\r\nobj(as.vector(idx), gg, numPack) # 10432.6\r\n\r\ntapply(gg, as.vector(idx), sum)\r\n# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 \r\n# 1609 1589 1730 1497 1629 1650 1519 1517 1401 1611 1408 1544 1388 1402 1517 1501 1598 1462 1699 1389 \r\nsum(gg) / numPack # 1533" 169 | 170 | ### Test Results 171 | 172 | ``` r 173 | library(pastebin) 174 | library(testthat) 175 | 176 | date() 177 | ``` 178 | 179 | ## [1] "Fri Jul 28 22:02:05 2017" 180 | 181 | ``` r 182 | test_dir("tests/") 183 | ``` 184 | 185 | ## testthat results ======================================================================================================== 186 | ## OK: 0 SKIPPED: 0 FAILED: 0 187 | ## 188 | ## DONE =================================================================================================================== 189 | -------------------------------------------------------------------------------- /man/as.character.paste.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-paste.r 3 | \name{as.character.paste} 4 | 
\alias{as.character.paste} 5 | \title{Extract just the paste text from a paste object} 6 | \usage{ 7 | \method{as.character}{paste}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{paste object} 11 | 12 | \item{...}{unused} 13 | } 14 | \description{ 15 | Extract just the paste text from a paste object 16 | } 17 | -------------------------------------------------------------------------------- /man/get_paste.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-paste.r 3 | \name{get_paste} 4 | \alias{get_paste} 5 | \title{Get raw paste data} 6 | \usage{ 7 | get_paste(x, use_scraping_api = FALSE, include_metadata = FALSE) 8 | } 9 | \arguments{ 10 | \item{x}{paste id} 11 | 12 | \item{use_scraping_api}{if a pro member, set this to \code{TRUE}, otherwise leave it \code{FALSE} 13 | and be kind to their servers lest ye be banned.} 14 | 15 | \item{include_metadata}{if \code{use_scraping_api} is \code{TRUE} and this is \code{TRUE}, the returned 16 | \code{list} will include metadata} 17 | } 18 | \value{ 19 | a \code{list} with the paste text or the paste text plus metadata. A \code{list} is returned 20 | to make it easier to deal with the results programmatically. Returning a \code{list} 21 | in one call context and a \code{character} vector in another may be OK interactively 22 | but it creates a situation where you need to write \code{if} logic to handle 23 | programmatically. Use \link{toString} to extract just the paste body 24 | } 25 | \description{ 26 | Get raw paste data 27 | } 28 | \note{ 29 | This API call can use the Scraping API which requires a paid account and a white-listed IP address. 
30 | } 31 | \references{ 32 | \href{https://pastebin.com/api_scraping_faq}{Scraping API} 33 | } 34 | -------------------------------------------------------------------------------- /man/get_paste_metadata.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-paste-metadata.r 3 | \name{get_paste_metadata} 4 | \alias{get_paste_metadata} 5 | \title{Get paste metadata} 6 | \usage{ 7 | get_paste_metadata(paste_id) 8 | } 9 | \arguments{ 10 | \item{paste_id}{paste id} 11 | } 12 | \description{ 13 | Get paste metadata 14 | } 15 | \note{ 16 | This API call uses the Scraping API which requires a paid account and a white-listed IP address. 17 | } 18 | \references{ 19 | \href{https://pastebin.com/api_scraping_faq}{Scraping API} 20 | } 21 | -------------------------------------------------------------------------------- /man/get_recent_pastes.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-recent-pastes.r 3 | \name{get_recent_pastes} 4 | \alias{get_recent_pastes} 5 | \title{Get recent pastes} 6 | \usage{ 7 | get_recent_pastes(limit = 50, lang = NULL) 8 | } 9 | \arguments{ 10 | \item{limit}{number of recent pastes to fetch. Limit is 500, default is 50.} 11 | 12 | \item{lang}{limit the recent paste list to a particular language. Default is all pastes} 13 | } 14 | \description{ 15 | Get recent pastes 16 | } 17 | \note{ 18 | This API call uses the Scraping API which requires a paid account and a white-listed IP address. 
19 | } 20 | \references{ 21 | \href{https://pastebin.com/api_scraping_faq}{Scraping API} 22 | } 23 | -------------------------------------------------------------------------------- /man/get_trending_pastes.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-trending-pastes.r 3 | \name{get_trending_pastes} 4 | \alias{get_trending_pastes} 5 | \title{Get trending pastes} 6 | \usage{ 7 | get_trending_pastes(pastebin_key = pastebin_api_key()) 8 | } 9 | \arguments{ 10 | \item{pastebin_key}{pastebin API key} 11 | } 12 | \description{ 13 | Get trending pastes 14 | } 15 | \references{ 16 | \url{https://pastebin.com/api#10} 17 | } 18 | -------------------------------------------------------------------------------- /man/new_paste.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/new-paste.r 3 | \name{new_paste} 4 | \alias{new_paste} 5 | \title{Create a new paste} 6 | \usage{ 7 | new_paste(text, name = NULL, format = "text", impersonate = FALSE, 8 | visibility = c("public", "unlisted", "private"), expires = "n", 9 | pastebin_key = pastebin_api_key()) 10 | } 11 | \arguments{ 12 | \item{text}{of paste} 13 | 14 | \item{name}{name/title of paste} 15 | 16 | \item{format}{hint for syntax highlighting. Defaults to \code{text}. See 17 | \href{https://pastebin.com/api#5}{the detail page} for more info.} 18 | 19 | \item{impersonate}{if \code{TRUE} then \code{PASTEBIN_USER} and \code{PASTEBIN_PASSWORD} \emph{must} be set 20 | in order to generate a user key to be applied with the API key. Don't blame me, 21 | blame \href{https://pastebin.com/api#8}{pastebin}.} 22 | 23 | \item{visibility}{one of \code{public}, \code{unlisted} or \code{private}. 
Defaults to \code{public}} 24 | 25 | \item{expires}{either \code{n} for never or an abbreviated time expiration string in the form 26 | of a digit (the "number of") and a units character \code{m} for minute(s), 27 | \code{d} for day(s), \code{w} for week(s). Defaults to \code{n} (never). See 28 | \href{https://pastebin.com/api#6}{the detail page} for more info.} 29 | 30 | \item{pastebin_key}{pastebin API key} 31 | } 32 | \description{ 33 | Create a new paste 34 | } 35 | \note{ 36 | The maximum size a paste can be is 512 kilobytes (0.5 megabytes). Pro members are 37 | allowed to create pastes up to 10 megabytes. 38 | } 39 | -------------------------------------------------------------------------------- /man/pastebin.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/pastebin-package.R 3 | \docType{package} 4 | \name{pastebin} 5 | \alias{pastebin} 6 | \alias{pastebin-package} 7 | \title{Tools to work with the pastebin API} 8 | \description{ 9 | Tools to work with the pastebin API 10 | } 11 | \author{ 12 | Bob Rudis (bob@rud.is) 13 | } 14 | -------------------------------------------------------------------------------- /man/pastebin_api_key.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/pastebin-api-key.r 3 | \name{pastebin_api_key} 4 | \alias{pastebin_api_key} 5 | \title{Get or set PASTEBIN_API_KEY value} 6 | \usage{ 7 | pastebin_api_key(force = FALSE) 8 | } 9 | \arguments{ 10 | \item{force}{force setting a new pastebin API key for the current environment?} 11 | } 12 | \value{ 13 | atomic character vector containing the pastebin API key 14 | } 15 | \description{ 16 | The API wrapper functions in this package all rely on a pastebin API 17 | key residing in the environment variable \code{PASTEBIN_API_KEY}. 
The 18 | easiest way to accomplish this is to set it in the \code{.Renviron} file in your 19 | home directory. 20 | } 21 | \note{ 22 | a pastebin API key is only necessary for "poster" access 23 | } 24 | -------------------------------------------------------------------------------- /man/toString.paste.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/get-paste.r 3 | \name{toString.paste} 4 | \alias{toString.paste} 5 | \title{Extract just the paste text from a paste object} 6 | \usage{ 7 | \method{toString}{paste}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{paste object} 11 | 12 | \item{...}{unused} 13 | } 14 | \description{ 15 | Extract just the paste text from a paste object 16 | } 17 | -------------------------------------------------------------------------------- /pastebin.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | StripTrailingWhitespace: Yes 16 | 17 | BuildType: Package 18 | PackageUseDevtools: Yes 19 | PackageInstallArgs: --no-multiarch --with-keep.source 20 | PackageBuildArgs: --resave-data 21 | PackageRoxygenize: rd,collate,namespace 22 | -------------------------------------------------------------------------------- /tests/test-all.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | test_check("pastebin") 3 | -------------------------------------------------------------------------------- /tests/testthat/test-pastebin.R: -------------------------------------------------------------------------------- 1 | context("basic functionality") 2 | test_that("we can do 
something", { 3 | 4 | #expect_that(some_function(), is_a("data.frame")) 5 | 6 | }) 7 | --------------------------------------------------------------------------------