├── .gitignore ├── .npmignore ├── CHANGELOG.md ├── LICENSE ├── NOTICE ├── README.md ├── config.yaml ├── consumer-complaints.csv ├── consumer-complaints.json ├── cp-druid-logs ├── diamonds.csv ├── diamonds.js ├── diamonds.sql.zip ├── diamonds.txt ├── docker-init ├── docker-init-druid ├── docker-init-mysql ├── docker-init-postgres ├── docker-init-presto ├── docker-start ├── docker-stop ├── download-latest-druid-rc ├── druid ├── Dockerfile ├── channel-lookup │ └── channel-lookup.json ├── cp-logs ├── init ├── load-data ├── lookup-init.json ├── nato-phonetic │ └── nato-phonetic.json ├── override-imply ├── wikipedia-compact-index.json └── wikipedia-index.json ├── info.js ├── make-data-files ├── make-sql ├── mysql ├── load-data ├── wikipedia-create-raw-table.sql └── wikipedia-rollup.sql ├── package.json ├── postgres ├── load-data ├── wikipedia-create-raw-table.sql └── wikipedia-rollup.sql ├── presto └── etc │ ├── catalog │ ├── mysql.properties │ └── postgresql.properties │ ├── config.properties │ ├── jvm.config │ ├── log.properties │ └── node.properties ├── version ├── wikipedia-2015-09-12.gz └── wikipedia-sampled.json /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules/ 3 | *.sublime-* 4 | imply/ 5 | logs/ 6 | imply-*.tar.gz 7 | 8 | # You have to make the files in 'tmp' yourself by running ./make-data-files 9 | tmp/ 10 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | !info.js 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | For updates follow [@implydata](https://twitter.com/implydata) on Twitter. 4 | 5 | ## 0.1.0 6 | 7 | - Started this repo. current databases: Druid, MySQL, Postgres 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2016 Imply Data Inc. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DataZoo 2 | 3 | Welcome to the Imply Data zoo. 4 | 5 | This project is mainly a testing ground for [Plywood](https://github.com/implydata/plywood), 6 | [Pivot](https://github.com/implydata/pivot), and [PlyQL](https://github.com/implydata/plyql). 7 | 8 | This repo has helpful scripts and seed data to set up Docker containers with a number of databases. 9 | 10 | ## Initial Setup 11 | 12 | ### Docker installation 13 | 14 | The steps below install Docker on a Mac through Homebrew, VirtualBox, and Docker Machine. 15 | 16 | #### Install Docker (Mac) 17 | 18 | [Install Homebrew](http://brew.sh/#install) 19 | 20 | ```sh 21 | ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" 22 | ``` 23 | 24 | [Install Cask](http://caskroom.io/) 25 | 26 | ```sh 27 | brew install caskroom/cask/brew-cask 28 | ``` 29 | 30 | Install VirtualBox 31 | 32 | ```sh 33 | brew update 34 | brew cask install virtualbox 35 | ``` 36 | 37 | Install Docker Machine 38 | 39 | ```sh 40 | brew update 41 | brew install docker-machine docker 42 | docker-machine create --driver virtualbox default 43 | eval "$(docker-machine env default)" 44 | ``` 45 | 46 | Check that it is working: 47 | 48 | ```sh 49 | docker run hello-world 50 | ``` 51 | 52 | #### After installing Docker 53 | 1. Increase the docker-machine memory to 4GB: 54 | 1. Open Oracle VM VirtualBox Manager (installed with Docker) 55 | 2. Under default machine -> Settings -> System -> Base Memory -> 4096MB 56 | 2. Run `./make-data-files` 57 | 3.
Run `./docker-init` 58 | 59 | ## Troubleshooting 60 | 61 | Error: `Error getting IP address: Something went wrong running an SSH command` 62 | Try recreating docker-machine 63 | 64 | ``` 65 | $ docker-machine rm default 66 | $ docker-machine create --driver virtualbox default 67 | ``` 68 | Error: `Error getting IP address: ssh command error: 69 | command : ip addr show 70 | err : exit status 255 71 | output :` 72 | Go to settings in VirtualBox Manager for your default machine 73 | In settings > Network > Adapter 2 (or whichever adapter is the Host-only Adapter) > Advanced > Promiscuous Mode > Allow All 74 | -------------------------------------------------------------------------------- /config.yaml: -------------------------------------------------------------------------------- 1 | port: 9090 2 | 3 | druidHost: 192.168.99.100 4 | 5 | sourceListScan: disable 6 | 7 | dataSources: 8 | - name: wikipedia-druid 9 | title: Wikipedia Druid 10 | engine: druid 11 | source: wikipedia 12 | 13 | refreshRule: 14 | rule: query 15 | refresh: P1D 16 | 17 | defaultSortMeasure: count 18 | 19 | defaultPinnedDimensions: [] 20 | 21 | introspection: no-autofill 22 | 23 | attributeOverrides: 24 | - name: sometimeLater 25 | type: TIME 26 | 27 | - name: commentLength 28 | type: NUMBER 29 | 30 | - name: deltaBucket100 31 | type: NUMBER 32 | 33 | - name: isAnonymous 34 | type: BOOLEAN 35 | 36 | - name: isMinor 37 | type: BOOLEAN 38 | 39 | - name: isNew 40 | type: BOOLEAN 41 | 42 | - name: isRobot 43 | type: BOOLEAN 44 | 45 | dimensions: 46 | - name: __time 47 | title: Time 48 | kind: time 49 | expression: $__time 50 | 51 | - name: sometimeLater 52 | title: Sometime Later 53 | kind: time 54 | 55 | - name: channel 56 | title: Channel 57 | expression: $channel 58 | 59 | - name: channel-lookup 60 | title: Channel Lookup 61 | expression: $channel.lookup('channel-lookup') 62 | 63 | - name: cityName 64 | title: City Name 65 | expression: $cityName 66 | 67 | - name: comment 68 | title: Comment 69 | expression: $comment 70 | 71 | - name: commentLength 72 | title: Comment Length 73 | expression: $commentLength 74 | kind: number 75 | #bucketingStrategy: defaultNoBucket 76 | granularities: [0.1, 0.5, 1, 2, 3] 77 | 78 | - name: commentLengthOver100 79 | title: Comment Length Over 100 80 | expression: $commentLength > 100 81 | kind: boolean 82 | 83 | - name: deltaBucket100 84 | title: Delta Bucket 85 | expression: $deltaBucket100 86 | kind: number 87 | 88 | - name: countryIsoCode 89 | title: Country Iso Code 90 | expression: $countryIsoCode 91 | 92 | - name: countryName 93 | title: Country Name 94 | expression: $countryName 95 | 96 | - name: isAnonymous 97 | title: Is Anonymous 98 | kind: boolean 99 | 100 | - name: isMinor 101 | title: Is Minor 102 | kind: boolean 103 | 104 | - name: isNew 105 | title: Is New 106 | kind: boolean 107 | 108 | - name: isRobot 109 | title: Is Robot 110 | kind: boolean 111 | 112 | - name: isUnpatrolled 113 | title: Is Unpatrolled 114 | expression: $isUnpatrolled 115 | 116 | - name: metroCode 117 | title: Metro Code 118 | expression: $metroCode 119 | 120 | - name: namespace 121 | title: Namespace 122 | expression: $namespace 123 | 124 | - name: page 125 | title: Page 126 | expression: $page 127 | 128 | - name: regionIsoCode 129 | title: Region Iso Code 130 | expression: $regionIsoCode 131 | 132 | - name: regionName 133 | title: Region Name 134 | expression: $regionName 135 | 136 | - name: user 137 | title: User 138 | expression: $user 139 | 140 | - name: userChars 141 | title: User Chars 142 | 
expression: $userChars 143 | 144 | - name: userCharsPhonetic 145 | title: User Chars Phonetic 146 | expression: $userChars.lookup("nato-phonetic") 147 | 148 | - name: cityLoveKoala 149 | title: ʕ•ᴥ•ʔ ❤️ City ❤️ ʕ•ᴥ•ʔ 150 | expression: "'ʕ•ᴥ•ʔ ❤️ ' ++ $cityName ++ ' ❤️ ʕ•ᴥ•ʔ'" 151 | 152 | measures: 153 | - name: count 154 | title: Count 155 | expression: $main.sum($count) 156 | 157 | - name: added 158 | title: Added 159 | expression: $main.sum($added) 160 | 161 | - name: deleted 162 | title: Deleted 163 | expression: $main.sum($deleted) 164 | 165 | - name: delta 166 | title: Delta 167 | expression: $main.sum($delta) 168 | 169 | - name: deltaByTen 170 | title: Delta By Ten 171 | expression: $main.sum($deltaByTen) 172 | 173 | - name: delta_hist_p95 174 | title: Delta Hist P95 175 | expression: $main.quantile($delta_hist,0.95) 176 | 177 | - name: delta_hist_p99 178 | title: Delta Hist P99 179 | expression: $main.quantile($delta_hist,0.99) 180 | 181 | - name: max_delta 182 | title: Max Delta 183 | expression: $main.max($max_delta) 184 | 185 | - name: min_delta 186 | title: Min Delta 187 | expression: $main.min($min_delta) 188 | 189 | - name: avg_commentLength 190 | title: Average Comment Length 191 | expression: $main.sum($commentLength * $count) / $main.sum($count) 192 | 193 | - name: max_commentLength 194 | title: Max Comment Length 195 | expression: $main.max($commentLength) 196 | 197 | - name: min_commentLength 198 | title: Min Comment Length 199 | expression: $main.min($commentLength) 200 | 201 | - name: page_unique 202 | title: Page Unique 203 | expression: $main.countDistinct($page_unique) 204 | 205 | - name: user_unique 206 | title: User Unique 207 | expression: $main.countDistinct($user_unique) 208 | 209 | - name: user_theta 210 | title: User Unique (theta) 211 | expression: $main.countDistinct($user_theta) 212 | 213 | 214 | - name: wikipedia-static 215 | title: Wikipedia Static 216 | engine: native 217 | source: ./wikipedia-sampled.json 218 | 219 | refreshRule: 220 | rule: query 221 | refresh: P1D 222 | 223 | defaultPinnedDimensions: [] 224 | 225 | introspection: autofill-all 226 | -------------------------------------------------------------------------------- /cp-druid-logs: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | eval $(docker-machine env) 4 | 5 | echo "Copying Druid logs..."; 6 | 7 | mkdir -p logs 8 | docker exec -it datazoo-druid /opt/data/druid/cp-logs; 9 | -------------------------------------------------------------------------------- /diamonds.sql.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/implydata/datazoo/a782f361188ad3d6a9021caf9c1b1c3e2a7c1f35/diamonds.sql.zip -------------------------------------------------------------------------------- /diamonds.txt: -------------------------------------------------------------------------------- 1 | Prices of 50,000 round cut diamonds 2 | Description 3 | A dataset containing the prices and other attributes of almost 54,000 diamonds. The variables are as follows: 4 | 5 | Format 6 | A data frame with 53940 rows and 10 variables 7 | 8 | Details 9 | price. price in US dollars (\$326--\$18,823) 10 | carat. weight of the diamond (0.2--5.01) 11 | cut. quality of the cut (Fair, Good, Very Good, Premium, Ideal) 12 | colour. diamond colour, from J (worst) to D (best) 13 | clarity. a measurement of how clear the diamond is (I1 (worst), SI1, SI2, VS1, VS2, VVS1, VVS2, IF (best)) 14 | x. 
length in mm (0--10.74) 15 | y. width in mm (0--58.9) 16 | z. depth in mm (0--31.8) 17 | depth. total depth percentage = z / mean(x, y) = 2 * z / (x + y) (43--79) 18 | table. width of top of diamond relative to widest point (43--95) -------------------------------------------------------------------------------- /docker-init: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | if [ `cat ./version` != `cat ./tmp/version` ] 4 | then 5 | echo "Data files out of date, please run ./make-data-files"; 6 | exit 1; 7 | fi 8 | 9 | docker-machine start default || echo "Machine is already running"; 10 | 11 | ./docker-init-druid 12 | ./docker-init-mysql 13 | ./docker-init-postgres 14 | # ./docker-init-presto 15 | -------------------------------------------------------------------------------- /docker-init-druid: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | eval $(docker-machine env) 3 | 4 | #OVERRIDE_IMPLY_VERSION="imply-2.5.8" 5 | 6 | echo "Loading Druid..."; 7 | docker rm -f datazoo-druid 2>/dev/null || echo "no Druid container to remove"; 8 | docker build -t datazoo-druid ./druid; 9 | docker run -v "$PWD":/opt/data -p 8081-8110:8081-8110 -p 8200:8200 -p 9095:9095 -d --name datazoo-druid datazoo-druid; 10 | 11 | if [ -z ${OVERRIDE_IMPLY_VERSION+x} ]; then 12 | echo "Using stock Docker image"; 13 | else 14 | echo "Overriding Imply version to $OVERRIDE_IMPLY_VERSION"; 15 | 16 | # ensure that `imply` folder exists 17 | if tar -xzf "$OVERRIDE_IMPLY_VERSION.tar.gz" ; then 18 | rm -rf imply 19 | mv "$OVERRIDE_IMPLY_VERSION" imply 20 | else 21 | echo '===================================================================================' 22 | echo ' You are missing the override Imply distribution' 23 | echo ' You need to:' 24 | echo ' 1. Download the latest Imply distribution from http://imply.io/get-started' 25 | echo ' 2. Put the tarball in this directory' 26 | echo ' 3. Run this script again' 27 | echo '===================================================================================' 28 | exit 1 29 | fi 30 | 31 | docker exec -it datazoo-druid /opt/data/druid/override-imply; 32 | docker restart datazoo-druid; 33 | docker exec -it datazoo-druid /tmp/init; 34 | fi 35 | 36 | docker exec -it datazoo-druid /opt/data/druid/load-data; 37 | -------------------------------------------------------------------------------- /docker-init-mysql: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | eval $(docker-machine env) 3 | 4 | echo "Loading MySQL..." 5 | docker rm -f datazoo-mysql 2>/dev/null || echo "no MySQL container to remove"; 6 | docker run -v "$PWD":/opt/data -p 3306:3306 -d --name datazoo-mysql -e MYSQL_ALLOW_EMPTY_PASSWORD='true' mysql/mysql-server:5.7 7 | docker exec -it datazoo-mysql /opt/data/mysql/load-data 8 | -------------------------------------------------------------------------------- /docker-init-postgres: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | eval $(docker-machine env) 3 | 4 | echo "Loading Postgres..."
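# The container started below is the one presto/etc/catalog/postgresql.properties points at
# (jdbc:postgresql://192.168.99.100:5432/datazoo, user root, password datazoo). The repo is
# mounted at /opt/data so that postgres/load-data can read tmp/wikipedia-raw-postgres.sql.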
5 | docker rm -f datazoo-postgres 2>/dev/null || echo "no Postgres container to remove"; 6 | docker run -v "$PWD":/opt/data -p 5432:5432 --name datazoo-postgres -e POSTGRES_USER=root -e POSTGRES_PASSWORD=datazoo -d postgres:9.5 7 | docker exec -it datazoo-postgres /opt/data/postgres/load-data 8 | -------------------------------------------------------------------------------- /docker-init-presto: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | eval $(docker-machine env) 3 | 4 | PRESTO_TAG=0.141 5 | SERV_PORT=8080 6 | 7 | echo "Loading Presto..." 8 | docker rm -f datazoo-presto 2>/dev/null || echo "no Presto container to remove"; 9 | 10 | docker run -d --name=datazoo-presto --restart=always -h presto -p 8080:8080 \ 11 | -v "$PWD"/presto/etc:/presto/etc:Z \ 12 | zhicwu/presto:$PRESTO_TAG 13 | -------------------------------------------------------------------------------- /docker-start: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | ./docker-stop; 4 | docker-machine start default || echo "already running"; 5 | eval $(docker-machine env) 6 | 7 | docker start datazoo-druid 8 | docker start datazoo-mysql 9 | docker start datazoo-postgres 10 | # docker start datazoo-presto 11 | 12 | echo "Running on: $DOCKER_HOST" 13 | -------------------------------------------------------------------------------- /docker-stop: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | docker-machine stop default || echo "not running" 4 | -------------------------------------------------------------------------------- /download-latest-druid-rc: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | rm -rf druid-0.9.2-rc2; 4 | curl http://static.druid.io/artifacts/releases/druid-0.9.2-rc2-bin.tar.gz | tar xz; 5 | rm -rf druid-override; 6 | mv druid-0.9.2-rc2 druid-override; 7 | -------------------------------------------------------------------------------- /druid/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM imply/imply:2.5.8 2 | 3 | COPY init /tmp/init 4 | RUN chmod +x /tmp/init 5 | RUN /tmp/init 6 | -------------------------------------------------------------------------------- /druid/channel-lookup/channel-lookup.json: -------------------------------------------------------------------------------- 1 | { "key": "en", "value": "English" } 2 | { "key": "es", "value": "Spanish" } 3 | { "key": "fr", "value": "French" } 4 | { "key": "he", "value": "Hebrew" } 5 | { "key": "ru", "value": "Russian" } 6 | -------------------------------------------------------------------------------- /druid/cp-logs: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | cp var/sv/*.log /opt/data/logs/ 4 | -------------------------------------------------------------------------------- /druid/init: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | echo "Set Druid props..."; 4 | 5 | # "druid-namespace-lookup" = legacy lookups module 6 | # TODO: "druid-s3-extensions" 7 | perl -pi -e's/druid.extensions.loadList=.*/druid.extensions.loadList=["druid-lookups-cached-global", "druid-histogram", "druid-datasketches", "druid-kafka-indexing-service", "druid-parser-route"]/g' \ 8 | /opt/imply/conf-quickstart/druid/_common/common.runtime.properties 9 |
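# Note: the loadList set above must keep "druid-histogram" (the approxHistogramFold metric
# behind the delta_hist quantile measures), "druid-datasketches" (the thetaSketch user_theta
# metric), and "druid-lookups-cached-global" (the channel-lookup and nato-phonetic lookups)
# that wikipedia-index.json, lookup-init.json, and config.yaml depend on.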
10 | # Init for groupBy: V2 11 | echo 'druid.processing.numMergeBuffers=1' \ 12 | >> /opt/imply/conf-quickstart/druid/_common/common.runtime.properties 13 | 14 | # Bring JS back 15 | echo 'druid.javascript.enabled=true' \ 16 | >> /opt/imply/conf-quickstart/druid/_common/common.runtime.properties 17 | 18 | # Use doubles for real (for 0.11.x) 19 | echo 'druid.indexing.doubleStorage=double' \ 20 | >> /opt/imply/conf-quickstart/druid/_common/common.runtime.properties 21 | 22 | # Temp fix, remove when https://github.com/druid-io/druid/pull/5097 is in 23 | echo 'druid.lookup.numLookupLoadingThreads=1' \ 24 | >> /opt/imply/conf-quickstart/druid/_common/common.runtime.properties 25 | 26 | # Heap dump if needed 27 | echo '-XX:+HeapDumpOnOutOfMemoryError' \ 28 | >> /opt/imply/conf-quickstart/druid/historical/jvm.config 29 | -------------------------------------------------------------------------------- /druid/load-data: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | echo "Sending index job..."; 4 | bin/post-index-task -f /opt/data/druid/wikipedia-index.json; 5 | 6 | echo "Sending compact index job..."; 7 | bin/post-index-task -f /opt/data/druid/wikipedia-compact-index.json; 8 | 9 | curl -s -X POST -d '{}' http://localhost:8081/druid/coordinator/v1/lookups/config --header "Content-Type:application/json" > /dev/null 10 | 11 | curl -s -X POST -d @/opt/data/druid/lookup-init.json http://localhost:8081/druid/coordinator/v1/lookups/config --header "Content-Type:application/json" > /dev/null 12 | -------------------------------------------------------------------------------- /druid/lookup-init.json: -------------------------------------------------------------------------------- 1 | { 2 | "__default": { 3 | "channel-lookup": { 4 | "version": "b", 5 | "lookupExtractorFactory": { 6 | "type": "map", 7 | "map": { 8 | "en": "English", 9 | "es": "Spanish", 10 | "fr": "French", 11 | "he": "Hebrew", 12 | "ru": "Russian" 13 | } 14 | } 15 | }, 16 | "nato-phonetic": { 17 | "version": "b", 18 | "lookupExtractorFactory": { 19 | "type": "cachedNamespace", 20 | "extractionNamespace": { 21 | "type": "uri", 22 | "namespace": "nato-phonetic", 23 | "uri": "file:/opt/data/druid/nato-phonetic/nato-phonetic.json", 24 | "namespaceParseSpec": { 25 | "format": "customJson", 26 | "keyFieldName": "key", 27 | "valueFieldName": "value" 28 | }, 29 | "pollPeriod": "P1D" 30 | }, 31 | "firstCacheTimeout": 120000 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /druid/nato-phonetic/nato-phonetic.json: -------------------------------------------------------------------------------- 1 | { "key": "A", "value": "Alfa" } 2 | { "key": "B", "value": "Bravo" } 3 | { "key": "C", "value": "Charlie" } 4 | { "key": "D", "value": "Delta" } 5 | { "key": "E", "value": "Echo" } 6 | { "key": "F", "value": "Foxtrot" } 7 | { "key": "G", "value": "Golf" } 8 | { "key": "H", "value": "Hotel" } 9 | { "key": "I", "value": "India" } 10 | { "key": "J", "value": "Juliett" } 11 | { "key": "K", "value": "Kilo" } 12 | { "key": "L", "value": "Lima" } 13 | { "key": "M", "value": "Mike" } 14 | { "key": "N", "value": "November" } 15 | { "key": "O", "value": "Oscar" } 16 | { "key": "P", "value": "Papa" } 17 | { "key": "Q", "value": "Quebec" } 18 | { "key": "R", "value": "Romeo" } 19 | { "key": "S", "value": "Sierra" } 20 | { "key": "T", "value": "Tango" } 21 | { "key": "U", "value": "Uniform" } 22 | { "key": "V", "value": "Victor" } 23 | { "key": 
"W", "value": "Whiskey" } 24 | { "key": "X", "value": "X-ray" } 25 | { "key": "Y", "value": "Yankee" } 26 | { "key": "Z", "value": "Zulu" } 27 | { "key": "-", "value": "Dash" } 28 | -------------------------------------------------------------------------------- /druid/override-imply: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | echo "Overriding Imply..."; 4 | 5 | rm -rf conf/ 6 | mkdir -p conf/ 7 | cp -r /opt/data/imply/conf/* conf/ 8 | 9 | rm -rf conf-quickstart/ 10 | mkdir -p conf-quickstart/ 11 | cp -r /opt/data/imply/conf-quickstart/* conf-quickstart/ 12 | 13 | rm -rf dist/ 14 | mkdir -p dist/ 15 | cp -r /opt/data/imply/dist/* dist/ 16 | -------------------------------------------------------------------------------- /druid/wikipedia-compact-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "type" : "index", 3 | "spec" : { 4 | "ioConfig" : { 5 | "type" : "index", 6 | "firehose" : { 7 | "type" : "local", 8 | "baseDir" : "/opt/data/tmp/", 9 | "filter" : "wikipedia.json" 10 | } 11 | }, 12 | "dataSchema" : { 13 | "dataSource" : "wikipedia-compact", 14 | "granularitySpec" : { 15 | "type" : "uniform", 16 | "segmentGranularity" : "day", 17 | "queryGranularity" : "hour", 18 | "intervals" : ["2015-09-12/2015-09-13"] 19 | }, 20 | "parser" : { 21 | "type" : "string", 22 | "parseSpec" : { 23 | "format" : "json", 24 | "dimensionsSpec" : { 25 | "dimensions" : [ 26 | "channel", 27 | "cityName", 28 | { "name" : "commentLength", "type" : "long" }, 29 | "countryIsoCode", 30 | "countryName", 31 | "isAnonymous", 32 | "isMinor", 33 | "isNew", 34 | "isRobot", 35 | "isUnpatrolled", 36 | "metroCode", 37 | "namespace", 38 | "regionIsoCode", 39 | "regionName" 40 | ] 41 | }, 42 | "timestampSpec" : { 43 | "format" : "auto", 44 | "column" : "__time" 45 | } 46 | } 47 | }, 48 | "metricsSpec" : [ 49 | { 50 | "name" : "count", 51 | "type" : "count" 52 | }, 53 | { 54 | "name" : "added", 55 | "type" : "longSum", 56 | "fieldName" : "added" 57 | }, 58 | { 59 | "name" : "deleted", 60 | "type" : "longSum", 61 | "fieldName" : "deleted" 62 | }, 63 | { 64 | "name" : "delta", 65 | "type" : "longSum", 66 | "fieldName" : "delta" 67 | }, 68 | { 69 | "name" : "page_unique", 70 | "type" : "hyperUnique", 71 | "fieldName" : "page" 72 | } 73 | ] 74 | }, 75 | "tuningConfig" : { 76 | "type" : "index", 77 | "targetPartitionSize" : 5000000, 78 | "maxRowsInMemory" : 20000 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /druid/wikipedia-index.json: -------------------------------------------------------------------------------- 1 | { 2 | "type" : "index", 3 | "spec" : { 4 | "ioConfig" : { 5 | "type" : "index", 6 | "firehose" : { 7 | "type" : "local", 8 | "baseDir" : "/opt/data/tmp/", 9 | "filter" : "wikipedia.json" 10 | } 11 | }, 12 | "dataSchema" : { 13 | "dataSource" : "wikipedia", 14 | "granularitySpec" : { 15 | "type" : "uniform", 16 | "segmentGranularity" : "day", 17 | "queryGranularity" : "minute", 18 | "intervals" : ["2015-09-12/2015-09-13"] 19 | }, 20 | "parser" : { 21 | "type" : "string", 22 | "parseSpec" : { 23 | "format" : "json", 24 | "dimensionsSpec" : { 25 | "dimensions" : [ 26 | "sometimeLater", 27 | { "name" : "sometimeLaterMs", "type" : "long" }, 28 | "channel", 29 | "cityName", 30 | "comment", 31 | { "name" : "commentLength", "type" : "long" }, 32 | "commentLengthStr", 33 | "countryIsoCode", 34 | "countryName", 35 | { "name" : "deltaBucket100", 
"type" : "float" }, 36 | "isAnonymous", 37 | "isMinor", 38 | "isNew", 39 | "isRobot", 40 | "isUnpatrolled", 41 | "metroCode", 42 | "namespace", 43 | "page", 44 | "regionIsoCode", 45 | "regionName", 46 | "user", 47 | "userChars" 48 | ] 49 | }, 50 | "timestampSpec" : { 51 | "format" : "auto", 52 | "column" : "__time" 53 | } 54 | } 55 | }, 56 | "metricsSpec" : [ 57 | { 58 | "name" : "count", 59 | "type" : "count" 60 | }, 61 | { 62 | "name" : "added", 63 | "type" : "longSum", 64 | "fieldName" : "added" 65 | }, 66 | { 67 | "name" : "deleted", 68 | "type" : "longSum", 69 | "fieldName" : "deleted" 70 | }, 71 | { 72 | "name" : "delta", 73 | "type" : "longSum", 74 | "fieldName" : "delta" 75 | }, 76 | { 77 | "name" : "max_delta", 78 | "type" : "longMax", 79 | "fieldName" : "delta" 80 | }, 81 | { 82 | "name" : "min_delta", 83 | "type" : "longMin", 84 | "fieldName" : "delta" 85 | }, 86 | { 87 | "name" : "deltaByTen", 88 | "type" : "doubleSum", 89 | "fieldName" : "deltaByTen" 90 | }, 91 | { 92 | "name" : "delta_hist", 93 | "type" : "approxHistogramFold", 94 | "fieldName" : "delta" 95 | }, 96 | { 97 | "name" : "user_unique", 98 | "type" : "hyperUnique", 99 | "fieldName" : "user" 100 | }, 101 | { 102 | "type" : "thetaSketch", 103 | "name" : "user_theta", 104 | "fieldName": "user" 105 | }, 106 | { 107 | "name" : "page_unique", 108 | "type" : "hyperUnique", 109 | "fieldName" : "page" 110 | } 111 | ] 112 | }, 113 | "tuningConfig" : { 114 | "type" : "index", 115 | "targetPartitionSize" : 5000000, 116 | "maxRowsInMemory" : 20000 117 | } 118 | } 119 | } 120 | -------------------------------------------------------------------------------- /info.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/implydata/datazoo/a782f361188ad3d6a9021caf9c1b1c3e2a7c1f35/info.js -------------------------------------------------------------------------------- /make-data-files: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | # Force this to the working directory 4 | cd "$(dirname "$0")" 5 | 6 | mkdir -p tmp 7 | 8 | # Trim milliseconds for time and seconds for some time later 9 | echo "Extracting and transforming..."; 10 | gzcat wikipedia-2015-09-12.gz \ 11 | | jq -c ' 12 | select("2015-09-12" < .timestamp and .timestamp < "2015-09-13") | 13 | { 14 | __time: .timestamp | sub(".[0-9]{3}Z"; "Z"), 15 | sometimeLater: .timestamp | sub("2015-"; "2016-") | sub(":[0-9]{2}.[0-9]{3}Z"; ":00Z"), 16 | sometimeLaterMs: ((.timestamp | sub("2015-"; "2016-") | sub(":[0-9]{2}.[0-9]{3}Z"; ":00Z") | fromdateiso8601) * 1000), 17 | channel: .channel | sub("#"; "") | sub(".wikipedia"; ""), 18 | cityName, 19 | comment, 20 | commentLength: (.comment | length), 21 | commentLengthStr: (.comment | length) | tostring, 22 | countryIsoCode, 23 | countryName, 24 | deltaBucket100: (.delta/100 | floor | .*100), 25 | isAnonymous: (.countryName != null), 26 | isMinor, 27 | isNew, 28 | isRobot, 29 | isUnpatrolled, 30 | metroCode, 31 | namespace, 32 | page, 33 | regionIsoCode: (if .regionIsoCode != null then .regionIsoCode else (if .isRobot then "null" else null end) end), 34 | regionName, 35 | user, 36 | userChars: (.user | ascii_upcase | split("") | unique), 37 | delta, 38 | added: [.delta, 0] | max, 39 | deleted: [-.delta, 0] | max, 40 | deltaByTen: (.delta / 10) 41 | }' \ 42 | > tmp/wikipedia.json 43 | 44 | echo "Sampling..."; 45 | perl -ne'print if $. 
% 10 == 0' tmp/wikipedia.json > tmp/wikipedia-sampled.json 46 | 47 | ./make-sql 48 | 49 | cp ./version ./tmp 50 | -------------------------------------------------------------------------------- /make-sql: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | "use strict"; 3 | 4 | var fs = require('fs'); 5 | var byline = require('byline'); 6 | 7 | function formatValueMySQL(v) { 8 | if (v == null) return 'NULL'; 9 | var t = typeof v; 10 | if (t === 'string') return singleQuoteMySQL(v); 11 | return v; 12 | } 13 | 14 | function formatValuePostgres(v) { 15 | if (v == null) return 'NULL'; 16 | var t = typeof v; 17 | if (t === 'string') return singleQuote(v); 18 | if (Array.isArray(v)) return 'ARRAY[' + v.map(formatValuePostgres) + ']'; 19 | 20 | return v; 21 | } 22 | 23 | function singleQuoteMySQL(str) { 24 | return singleQuote(str.replace(/\\/g, "\\\\")); 25 | } 26 | 27 | function singleQuote(str) { 28 | return "'" + str.replace(/'/g, "''") + "'"; 29 | } 30 | 31 | var outMySQL = fs.createWriteStream('tmp/wikipedia-raw-mysql.sql') 32 | var outPostgres = fs.createWriteStream('tmp/wikipedia-raw-postgres.sql') 33 | 34 | var lines = byline(fs.createReadStream('tmp/wikipedia.json', { encoding: 'utf8' })); 35 | var cols = [ 36 | '__time', 37 | 'sometimeLater', 38 | 'channel', 39 | 'cityName', 40 | 'comment', 41 | 'commentLength', 42 | 'commentLengthStr', 43 | 'countryIsoCode', 44 | 'countryName', 45 | 'deltaBucket100', 46 | 'isAnonymous', 47 | 'isMinor', 48 | 'isNew', 49 | 'isRobot', 50 | 'isUnpatrolled', 51 | 'metroCode', 52 | 'namespace', 53 | 'page', 54 | 'regionIsoCode', 55 | 'regionName', 56 | 'user', 57 | 'userChars', 58 | 'delta', 59 | 'added', 60 | 'deleted', 61 | 'deltaByTen' 62 | ]; 63 | var postGres = cols; 64 | var mySql = cols.filter(c => c != 'userChars') 65 | 66 | var lineNumber = 0; 67 | lines.on('data', function(line) { 68 | var d = JSON.parse(line.toString()); 69 | d['__time'] = d['__time'].replace('Z', ''); 70 | d['sometimeLater'] = d['sometimeLater'].replace('Z', ''); 71 | var prefix = lineNumber % 100 ? 
', (' : '; INSERT INTO wikipedia_raw VALUES ('; 72 | outMySQL.write(prefix + mySql.map((c) => { return formatValueMySQL(d[c]) }).join(', ') + ')\n', 'utf8'); 73 | outPostgres.write(prefix + cols.map((c) => { return formatValuePostgres(d[c]) }).join(', ') + ')\n', 'utf8'); 74 | lineNumber++; 75 | }) 76 | 77 | lines.on('end', function() { 78 | var v = ';\n'; 79 | outMySQL.write(v, 'utf8'); 80 | outPostgres.write(v, 'utf8'); 81 | }); 82 | -------------------------------------------------------------------------------- /mysql/load-data: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | echo "Sleep for 15 seconds to make sure the server starts"; 4 | sleep 15; 5 | 6 | echo "Adding Timezone info..."; 7 | mysql_tzinfo_to_sql /usr/share/zoneinfo | mysql -u root mysql 8 | 9 | echo "Setting up MySQL..."; 10 | mysql -u root -e "CREATE DATABASE IF NOT EXISTS datazoo CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;" 11 | 12 | echo "Creating table..."; 13 | mysql -u root datazoo < /opt/data/mysql/wikipedia-create-raw-table.sql; 14 | 15 | echo "Importing data..."; 16 | mysql -u root --default_character_set utf8mb4 datazoo < /opt/data/tmp/wikipedia-raw-mysql.sql; 17 | 18 | echo "Rolling up..."; 19 | mysql -u root datazoo < /opt/data/mysql/wikipedia-rollup.sql; 20 | 21 | echo "Done"; 22 | -------------------------------------------------------------------------------- /mysql/wikipedia-create-raw-table.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `wikipedia_raw`; 2 | CREATE TABLE `wikipedia_raw` ( 3 | `__time` datetime NOT NULL, 4 | `sometimeLater` timestamp NOT NULL, 5 | `channel` varchar(255) NOT NULL, 6 | `cityName` varchar(255) DEFAULT NULL, 7 | `comment` varchar(300) NOT NULL, 8 | `commentLength` int(11) NOT NULL, 9 | `commentLengthStr` varchar(10) NOT NULL, 10 | `countryIsoCode` varchar(255) DEFAULT NULL, 11 | `countryName` varchar(255) DEFAULT NULL, 12 | `deltaBucket100` int(11) NOT NULL, 13 | `isAnonymous` tinyint(1) NOT NULL, 14 | `isMinor` tinyint(1) NOT NULL, 15 | `isNew` tinyint(1) NOT NULL, 16 | `isRobot` tinyint(1) NOT NULL, 17 | `isUnpatrolled` tinyint(1) NOT NULL, 18 | `metroCode` int(11) DEFAULT NULL, 19 | `namespace` varchar(255) DEFAULT NULL, 20 | `page` varchar(255) DEFAULT NULL, 21 | `regionIsoCode` varchar(255) DEFAULT NULL, 22 | `regionName` varchar(255) DEFAULT NULL, 23 | `user` varchar(255) DEFAULT NULL, 24 | `delta` int(11) NOT NULL, 25 | `added` int(11) NOT NULL, 26 | `deleted` int(11) NOT NULL, 27 | `deltaByTen` float NOT NULL 28 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 29 | -------------------------------------------------------------------------------- /mysql/wikipedia-rollup.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `wikipedia`; 2 | 3 | CREATE TABLE `wikipedia` 4 | SELECT 5 | /* Time Spec :-) */ 6 | CONVERT(DATE_FORMAT(`__time`, "%Y-%m-%d %H:%i:00"), DATETIME) AS "__time", /* Rollup queryGranularity: minute */ 7 | 8 | /* Dimensions */ 9 | `sometimeLater`, 10 | `channel`, 11 | `cityName`, 12 | `comment`, 13 | `commentLength`, 14 | `commentLengthStr`, 15 | `countryIsoCode`, 16 | `countryName`, 17 | `deltaBucket100`, 18 | `isAnonymous`, 19 | `isMinor`, 20 | `isNew`, 21 | `isRobot`, 22 | `isUnpatrolled`, 23 | `metroCode`, 24 | `namespace`, 25 | `page`, 26 | `regionIsoCode`, 27 | `regionName`, 28 | `user`, 29 | 30 | /* Measures */ 31 | COUNT(*) AS "count", 32 | 
SUM(`added`) AS "added", 33 | SUM(`deleted`) AS "deleted", 34 | SUM(`delta`) AS "delta", 35 | MIN(`delta`) AS "min_delta", 36 | MAX(`delta`) AS "max_delta", 37 | SUM(`deltaByTen`) AS "deltaByTen" 38 | 39 | FROM `wikipedia_raw` 40 | GROUP BY 41 | CONVERT(DATE_FORMAT(`__time`, "%Y-%m-%d %H:%i:00"), DATETIME), 42 | `sometimeLater`, 43 | `channel`, 44 | `cityName`, 45 | `comment`, 46 | `commentLength`, 47 | `commentLengthStr`, 48 | `countryIsoCode`, 49 | `countryName`, 50 | `deltaBucket100`, 51 | `isAnonymous`, 52 | `isMinor`, 53 | `isNew`, 54 | `isRobot`, 55 | `isUnpatrolled`, 56 | `metroCode`, 57 | `namespace`, 58 | `page`, 59 | `regionIsoCode`, 60 | `regionName`, 61 | `user`; 62 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datazoo", 3 | "version": "0.2.0", 4 | "description": "A collection of datasets", 5 | "main": "info.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/implydata/datazoo.git" 12 | }, 13 | "keywords": [ 14 | "datazoo", 15 | "dataset", 16 | "druid", 17 | "mysql", 18 | "postgres", 19 | "prestro" 20 | ], 21 | "author": "Vadim Ogievetsky", 22 | "license": "Apache-2.0", 23 | "bugs": { 24 | "url": "https://github.com/implydata/datazoo/issues" 25 | }, 26 | "homepage": "https://github.com/implydata/datazoo#readme", 27 | "dependencies": { 28 | "byline": "4.2.1", 29 | "through2": "2.0.1" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /postgres/load-data: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | echo "Sleep for 5 seconds to make sure the server starts"; 4 | sleep 5; 5 | 6 | echo "Creating database..."; 7 | echo "CREATE DATABASE datazoo;" | psql -q; 8 | 9 | echo "Creating table..."; 10 | psql -d datazoo -q < /opt/data/postgres/wikipedia-create-raw-table.sql; 11 | 12 | echo "Importing data..."; 13 | psql -d datazoo -q < /opt/data/tmp/wikipedia-raw-postgres.sql; 14 | 15 | echo "Rolling up..."; 16 | psql -d datazoo -q < /opt/data/postgres/wikipedia-rollup.sql; 17 | 18 | echo "Done"; 19 | -------------------------------------------------------------------------------- /postgres/wikipedia-create-raw-table.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS wikipedia_raw; 2 | CREATE TABLE wikipedia_raw ( 3 | "__time" timestamp NOT NULL, 4 | "sometimeLater" timestamp NOT NULL, 5 | "channel" varchar(255) COLLATE "C" NOT NULL, 6 | "cityName" varchar(255) COLLATE "C" DEFAULT NULL, 7 | "comment" varchar(300) COLLATE "C" NOT NULL, 8 | "commentLength" int NOT NULL, 9 | "commentLengthStr" varchar(10) COLLATE "C" NOT NULL, 10 | "countryIsoCode" varchar(255) COLLATE "C" DEFAULT NULL, 11 | "countryName" varchar(255) COLLATE "C" DEFAULT NULL, 12 | "deltaBucket100" int NOT NULL, 13 | "isAnonymous" boolean NOT NULL, 14 | "isMinor" boolean NOT NULL, 15 | "isNew" boolean NOT NULL, 16 | "isRobot" boolean NOT NULL, 17 | "isUnpatrolled" boolean NOT NULL, 18 | "metroCode" int DEFAULT NULL, 19 | "namespace" varchar(255) COLLATE "C" DEFAULT NULL, 20 | "page" varchar(255) COLLATE "C" DEFAULT NULL, 21 | "regionIsoCode" varchar(255) COLLATE "C" DEFAULT NULL, 22 | "regionName" varchar(255) COLLATE "C" DEFAULT NULL, 23 | "user" varchar(255) COLLATE "C" DEFAULT NULL, 24 | "userChars" 
char[] DEFAULT NULL, 25 | "delta" int NOT NULL, 26 | "added" int NOT NULL, 27 | "deleted" int NOT NULL, 28 | "deltaByTen" float NOT NULL 29 | ); 30 | -------------------------------------------------------------------------------- /postgres/wikipedia-rollup.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS "wikipedia"; 2 | 3 | CREATE TABLE "wikipedia" AS 4 | SELECT 5 | /* Time Spec :-) */ 6 | date_trunc('minute', "__time") AS "__time", /* Rollup queryGranularity: minute */ 7 | 8 | /* Dimensions */ 9 | "sometimeLater", 10 | "channel", 11 | "cityName", 12 | "comment", 13 | "commentLength", 14 | "commentLengthStr", 15 | "countryIsoCode", 16 | "countryName", 17 | "deltaBucket100", 18 | "isAnonymous", 19 | "isMinor", 20 | "isNew", 21 | "isRobot", 22 | "isUnpatrolled", 23 | "metroCode", 24 | "namespace", 25 | "page", 26 | "regionIsoCode", 27 | "regionName", 28 | "user", 29 | "userChars", 30 | 31 | /* Measures */ 32 | COUNT(*) AS "count", 33 | SUM("added") AS "added", 34 | SUM("deleted") AS "deleted", 35 | SUM("delta") AS "delta", 36 | MIN("delta") AS "min_delta", 37 | MAX("delta") AS "max_delta", 38 | SUM("deltaByTen") AS "deltaByTen" 39 | 40 | FROM "wikipedia_raw" 41 | GROUP BY 42 | date_trunc('minute', "__time"), 43 | "sometimeLater", 44 | "channel", 45 | "cityName", 46 | "comment", 47 | "commentLength", 48 | "commentLengthStr", 49 | "countryIsoCode", 50 | "countryName", 51 | "deltaBucket100", 52 | "isAnonymous", 53 | "isMinor", 54 | "isNew", 55 | "isRobot", 56 | "isUnpatrolled", 57 | "metroCode", 58 | "namespace", 59 | "page", 60 | "regionIsoCode", 61 | "regionName", 62 | "user", 63 | "userChars"; 64 | 65 | -------------------------------------------------------------------------------- /presto/etc/catalog/mysql.properties: -------------------------------------------------------------------------------- 1 | connector.name=mysql 2 | connection-url=jdbc:mysql://192.168.99.100:3306 3 | connection-user=root 4 | -------------------------------------------------------------------------------- /presto/etc/catalog/postgresql.properties: -------------------------------------------------------------------------------- 1 | connector.name=postgresql 2 | connection-url=jdbc:postgresql://192.168.99.100:5432/datazoo 3 | connection-user=root 4 | connection-password=datazoo 5 | -------------------------------------------------------------------------------- /presto/etc/config.properties: -------------------------------------------------------------------------------- 1 | coordinator=true 2 | node-scheduler.include-coordinator=true 3 | query.max-memory=4GB 4 | query.max-memory-per-node=1GB 5 | http-server.http.port=8080 6 | discovery-server.enabled=true 7 | discovery.uri=http://presto:8080 8 | -------------------------------------------------------------------------------- /presto/etc/jvm.config: -------------------------------------------------------------------------------- 1 | -server 2 | -Xmx2G 3 | -XX:+UseG1GC 4 | -XX:G1HeapRegionSize=32M 5 | -XX:+UseGCOverheadLimit 6 | -XX:+ExplicitGCInvokesConcurrent 7 | -XX:+HeapDumpOnOutOfMemoryError 8 | -XX:OnOutOfMemoryError=kill -9 0 9 | -------------------------------------------------------------------------------- /presto/etc/log.properties: -------------------------------------------------------------------------------- 1 | com.facebook.presto=INFO 2 | -------------------------------------------------------------------------------- /presto/etc/node.properties: 
-------------------------------------------------------------------------------- 1 | node.environment=production 2 | node.id=f41808b3-34bc-11e6-b18d-a45e60f258d9 3 | node.data-dir=/presto/data 4 | -------------------------------------------------------------------------------- /version: -------------------------------------------------------------------------------- 1 | 7 2 | -------------------------------------------------------------------------------- /wikipedia-2015-09-12.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/implydata/datazoo/a782f361188ad3d6a9021caf9c1b1c3e2a7c1f35/wikipedia-2015-09-12.gz --------------------------------------------------------------------------------
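Once `./docker-init` finishes, a quick sanity check of the loaded data can look like the sketch below. It assumes the docker-machine VM got the IP 192.168.99.100 that this repo uses in config.yaml and presto/etc/catalog (check yours with `docker-machine ip default`), and that the credentials from docker-init-mysql and docker-init-postgres are unchanged.

```sh
# Druid: the coordinator (port 8081) should list the "wikipedia" and "wikipedia-compact" datasources
curl http://192.168.99.100:8081/druid/coordinator/v1/datasources

# MySQL: root with an empty password (MYSQL_ALLOW_EMPTY_PASSWORD='true')
mysql -h 192.168.99.100 -P 3306 -u root datazoo -e 'SELECT COUNT(*) FROM wikipedia;'

# Postgres: root / datazoo
PGPASSWORD=datazoo psql -h 192.168.99.100 -p 5432 -U root -d datazoo -c 'SELECT COUNT(*) FROM wikipedia;'
```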