├── .gitignore ├── LICENSE ├── README.md └── ddb-streams ├── README.md ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle └── src ├── main └── java │ └── org │ └── partiql │ └── tutorials │ └── ddb │ └── streams │ └── CustomerReview.java └── test ├── java └── org │ └── partiql │ └── tutorials │ └── ddb │ └── streams │ ├── AbstractCustomerReviews.java │ ├── CustomerReviewsNewAndOldImages.java │ ├── CustomerReviewsNewImageUpdate.java │ └── utils │ ├── AwsDynamoDbLocalTestUtils.java │ └── CustomerReviewsTsvFileUtils.java └── resources ├── README.md ├── customer_reviews.txt └── customer_reviews_updates.txt /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/vim,java,emacs,gradle,eclipse,code-java,intellij+all 3 | # Edit at https://www.gitignore.io/?templates=vim,java,emacs,gradle,eclipse,code-java,intellij+all 4 | 5 | ### Code-Java ### 6 | # Language Support for Java(TM) by Red Hat extension for Visual Studio Code - https://marketplace.visualstudio.com/items?itemName=redhat.java 7 | 8 | .project 9 | .classpath 10 | factoryConfiguration.json 11 | 12 | ### Eclipse ### 13 | .metadata 14 | bin/ 15 | tmp/ 16 | *.tmp 17 | *.bak 18 | *.swp 19 | *~.nib 20 | local.properties 21 | .settings/ 22 | .loadpath 23 | .recommenders 24 | 25 | # External tool builders 26 | .externalToolBuilders/ 27 | 28 | # Locally stored "Eclipse launch configurations" 29 | *.launch 30 | 31 | # PyDev specific (Python IDE for Eclipse) 32 | *.pydevproject 33 | 34 | # CDT-specific (C/C++ Development Tooling) 35 | .cproject 36 | 37 | # CDT- autotools 38 | .autotools 39 | 40 | # Java annotation processor (APT) 41 | .factorypath 42 | 43 | # PDT-specific (PHP Development Tools) 44 | .buildpath 45 | 46 | # sbteclipse plugin 47 | .target 48 | 49 | # Tern plugin 50 | .tern-project 51 | 52 | # TeXlipse plugin 53 | .texlipse 54 | 55 | # STS (Spring Tool Suite) 56 | .springBeans 57 | 58 | # Code Recommenders 59 | .recommenders/ 60 | 61 | # Annotation Processing 62 | .apt_generated/ 63 | 64 | # Scala IDE specific (Scala & Java development for Eclipse) 65 | .cache-main 66 | .scala_dependencies 67 | .worksheet 68 | 69 | ### Eclipse Patch ### 70 | # Eclipse Core 71 | 72 | # JDT-specific (Eclipse Java Development Tools) 73 | 74 | # Annotation Processing 75 | .apt_generated 76 | 77 | .sts4-cache/ 78 | 79 | ### Emacs ### 80 | # -*- mode: gitignore; -*- 81 | *~ 82 | \#*\# 83 | /.emacs.desktop 84 | /.emacs.desktop.lock 85 | *.elc 86 | auto-save-list 87 | tramp 88 | .\#* 89 | 90 | # Org-mode 91 | .org-id-locations 92 | *_archive 93 | 94 | # flymake-mode 95 | *_flymake.* 96 | 97 | # eshell files 98 | /eshell/history 99 | /eshell/lastdir 100 | 101 | # elpa packages 102 | /elpa/ 103 | 104 | # reftex files 105 | *.rel 106 | 107 | # AUCTeX auto folder 108 | /auto/ 109 | 110 | # cask packages 111 | .cask/ 112 | dist/ 113 | 114 | # Flycheck 115 | flycheck_*.el 116 | 117 | # server auth directory 118 | /server/ 119 | 120 | # projectiles files 121 | .projectile 122 | 123 | # directory configuration 124 | .dir-locals.el 125 | 126 | # network security 127 | /network-security.data 128 | 129 | 130 | ### Intellij+all ### 131 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 132 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 133 | 134 | # User-specific stuff 135 | .idea/**/workspace.xml 
136 | .idea/**/tasks.xml 137 | .idea/**/usage.statistics.xml 138 | .idea/**/dictionaries 139 | .idea/**/shelf 140 | 141 | # Generated files 142 | .idea/**/contentModel.xml 143 | 144 | # Sensitive or high-churn files 145 | .idea/**/dataSources/ 146 | .idea/**/dataSources.ids 147 | .idea/**/dataSources.local.xml 148 | .idea/**/sqlDataSources.xml 149 | .idea/**/dynamic.xml 150 | .idea/**/uiDesigner.xml 151 | .idea/**/dbnavigator.xml 152 | 153 | # Gradle 154 | .idea/**/gradle.xml 155 | .idea/**/libraries 156 | 157 | # Gradle and Maven with auto-import 158 | # When using Gradle or Maven with auto-import, you should exclude module files, 159 | # since they will be recreated, and may cause churn. Uncomment if using 160 | # auto-import. 161 | # .idea/modules.xml 162 | # .idea/*.iml 163 | # .idea/modules 164 | # *.iml 165 | # *.ipr 166 | 167 | # CMake 168 | cmake-build-*/ 169 | 170 | # Mongo Explorer plugin 171 | .idea/**/mongoSettings.xml 172 | 173 | # File-based project format 174 | *.iws 175 | 176 | # IntelliJ 177 | out/ 178 | 179 | # mpeltonen/sbt-idea plugin 180 | .idea_modules/ 181 | 182 | # JIRA plugin 183 | atlassian-ide-plugin.xml 184 | 185 | # Cursive Clojure plugin 186 | .idea/replstate.xml 187 | 188 | # Crashlytics plugin (for Android Studio and IntelliJ) 189 | com_crashlytics_export_strings.xml 190 | crashlytics.properties 191 | crashlytics-build.properties 192 | fabric.properties 193 | 194 | # Editor-based Rest Client 195 | .idea/httpRequests 196 | 197 | # Android studio 3.1+ serialized cache file 198 | .idea/caches/build_file_checksums.ser 199 | 200 | ### Intellij+all Patch ### 201 | # Ignores the whole .idea folder and all .iml files 202 | # See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360 203 | 204 | .idea/ 205 | 206 | # Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-249601023 207 | 208 | *.iml 209 | modules.xml 210 | .idea/misc.xml 211 | *.ipr 212 | 213 | # Sonarlint plugin 214 | .idea/sonarlint 215 | 216 | ### Java ### 217 | # Compiled class file 218 | *.class 219 | 220 | # Log file 221 | *.log 222 | 223 | # BlueJ files 224 | *.ctxt 225 | 226 | # Mobile Tools for Java (J2ME) 227 | .mtj.tmp/ 228 | 229 | # Package Files # 230 | *.jar 231 | *.war 232 | *.nar 233 | *.ear 234 | *.zip 235 | *.tar.gz 236 | *.rar 237 | 238 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 239 | hs_err_pid* 240 | 241 | ### Vim ### 242 | # Swap 243 | [._]*.s[a-v][a-z] 244 | [._]*.sw[a-p] 245 | [._]s[a-rt-v][a-z] 246 | [._]ss[a-gi-z] 247 | [._]sw[a-p] 248 | 249 | # Session 250 | Session.vim 251 | Sessionx.vim 252 | 253 | # Temporary 254 | .netrwhist 255 | # Auto-generated tag files 256 | tags 257 | # Persistent undo 258 | [._]*.un~ 259 | 260 | ### Gradle ### 261 | .gradle 262 | build/ 263 | 264 | # Ignore Gradle GUI config 265 | gradle-app.setting 266 | 267 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 268 | !gradle-wrapper.jar 269 | 270 | # Cache of project 271 | .gradletasknamecache 272 | 273 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 274 | # gradle/wrapper/gradle-wrapper.properties 275 | 276 | ### Gradle Patch ### 277 | **/build/ 278 | 279 | # End of https://www.gitignore.io/api/vim,java,emacs,gradle,eclipse,code-java,intellij+all 280 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 
| Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # partiql-tutorials
2 | PartiQL Tutorials
3 |
--------------------------------------------------------------------------------
/ddb-streams/README.md:
--------------------------------------------------------------------------------
# PartiQL and DynamoDB Streams

This tutorial shows how to interface PartiQL with user-defined data and how to run PartiQL queries over that data.

In this tutorial we use data from the [Registry of Open Data on AWS](https://registry.opendata.aws/amazon-reviews/) to seed a DynamoDB table and configure [DynamoDB Streams](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Streams.html) on that table. We then interface PartiQL with the user-defined POJOs that capture rows from the DynamoDB table and evaluate PartiQL queries over our data.


## PartiQL implementation overview

The evaluator follows a typical flow made up of distinct phases:

[Parser and Compiler Diagram](https://github.com/therapon/partiql-lang-kotlin/blob/2eb7ea4613062aa7b1baeaa875bb911b03f7a4d6/docs/dev/img/parser-compiler.png)

### Interfacing with the Evaluator

Evaluation inside PartiQL relies on the type [`ExprValue`](https://github.com/therapon/partiql-lang-kotlin/blob/2eb7ea4613062aa7b1baeaa875bb911b03f7a4d6/lang/src/org/partiql/lang/eval/ExprValue.kt#L23). Evaluation proceeds by manipulating values of type `ExprValue` and maintaining a map of *bindings*. A binding is an association between a PartiQL variable (name) and a value; for example, when evaluation starts the evaluator is given a map of bindings that maps global names (akin to tables in other database systems) to their values.

[ExprValue Class Diagram](https://github.com/therapon/partiql-lang-kotlin/blob/2eb7ea4613062aa7b1baeaa875bb911b03f7a4d6/docs/dev/img/expr-value-class.png)
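To make these two notions concrete, here is a minimal sketch that builds one `ExprValue` from an Ion struct and places it in a map of global bindings. It uses the same `CompilerPipeline` and `ExprValueFactory` APIs that the tutorial code uses; the class name, the sample field values, and the global name `reviews` are illustrative choices, not part of the tutorial sources.

``` java
import com.amazon.ion.IonStruct;
import com.amazon.ion.IonSystem;
import com.amazon.ion.system.IonSystemBuilder;
import org.partiql.lang.CompilerPipeline;
import org.partiql.lang.eval.ExprValue;
import org.partiql.lang.eval.ExprValueFactory;

import java.util.Collections;
import java.util.Map;

public class ExprValueSketch {

    public static void main(String[] args) {
        IonSystem ion = IonSystemBuilder.standard().build();
        CompilerPipeline pipeline = CompilerPipeline.standard(ion);
        ExprValueFactory valueFactory = pipeline.getValueFactory();

        // Build a single row as an Ion struct and wrap it as an ExprValue,
        // the same way CustomerReview#asExprValue() does later in this tutorial.
        IonStruct row = ion.newEmptyStruct();
        row.add("customer_id", ion.newString("1"));
        row.add("star_rating", ion.newInt(5));
        ExprValue rowValue = valueFactory.newFromIonValue(row);

        // Wrap the row in a PartiQL list; a map like `globals` is what the evaluator
        // receives as its global bindings, so `reviews` plays the role of a table.
        ExprValue reviews = valueFactory.newList(Collections.singletonList(rowValue));
        Map<String, ExprValue> globals = Collections.singletonMap("reviews", reviews);

        System.out.println(globals.keySet() + " -> " + reviews);
    }
}
```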
## Tutorial Setup

The tutorial is self-contained. This repository consists of Java code and JUnit tests. The tests use a local DynamoDB instance and thus do not require an AWS account.

### Test Setup

As part of each test's setup we get:

1. a [local DynamoDB](src/test/java/org/partiql/tutorials/ddb/streams/AbstractCustomerReviews.java#L44) instance
1. a [new DynamoDB table](src/test/java/org/partiql/tutorials/ddb/streams/AbstractCustomerReviews.java#L57) called `CustomerReviews` [configured with DynamoDB Streams](src/test/java/org/partiql/tutorials/ddb/streams/AbstractCustomerReviews.java#L62)
1. the `CustomerReviews` table [seeded](src/test/java/org/partiql/tutorials/ddb/streams/AbstractCustomerReviews.java#L181) with some customer review [data](src/test/resources/customer_reviews.txt)

The `CustomerReviews` DynamoDB table has the following structure:

| `customer_id` | `review_id` | `product_title` | `star_rating` | `helpful_votes` | `total_votes` | `verified_purchase` | `review_headline` |
|---------------|-------------|-----------------|---------------|-----------------|---------------|---------------------|-------------------|
|               |             |                 |               |                 |               |                     |                   |


### Mapping DynamoDB rows to POJO

The DynamoDB library provides a [Mapper](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBMapper.html) and we use it for our [`CustomerReview`](src/main/java/org/partiql/tutorials/ddb/streams/CustomerReview.java#L17) POJO.

We also provide a mapping from our `CustomerReview` object to an `ExprValue` object through the [`CustomerReview#asExprValue()`](src/main/java/org/partiql/tutorials/ddb/streams/CustomerReview.java#L159) method. The method `asExprValue()` maps an instance of `CustomerReview` to a *structure* where

* each field's name is mapped to a *key*
* each field's value is associated with the corresponding key created in the preceding step

For example, a `CustomerReview` instance

| cr: CustomerReview      |
|-------------------------|
| customerId = "1"        |
| reviewId = "2"          |
| productTitle = "t"      |
| starRating = 5          |
| helpfulVotes = 6        |
| totalVotes = 7          |
| verifiedPurchase = true |
| reviewHeading = "r"     |

maps to

``` json
{
  "customer_id"       : "1",
  "review_id"         : "2",
  "product_title"     : "t",
  "star_rating"       : 5,
  "helpful_votes"     : 6,
  "total_votes"       : 7,
  "verified_purchase" : 1, // true -> 1, false -> 0
  "review_heading"    : "r"
}
```

### Running a PartiQL Query

Let's walk through a simple [test](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L30); a condensed sketch of the whole flow follows this list.

1. First we [load](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L31) our customer reviews data from a TSV file.
1. We obtain an [iterator](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L32) from the DynamoDB Stream.
   * `getRecordUpdates` can capture the updated record (`StreamViewType.NEW_IMAGE`) or the old record (`StreamViewType.OLD_IMAGE`)
1. We define the PartiQL [query](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L35) we want to evaluate as a string.
1. We [compile](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L41) the query.
1. We [wrap](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L45) our `Iterable` of `ExprValue` values into a PartiQL collection (a list), allowing the PartiQL evaluator to iterate over the data.
1. We [create](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L48) a map and [add](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L49) an entry binding the name `ddbstream` to the PartiQL list we just created.
1. We then create an `EvaluationSession` and use that map as its [global bindings](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L51).
1. Finally, we [evaluate our query](src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java#L55) under the global bindings and print the result.
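Putting the steps together, the sketch below condenses the same flow into one runnable `main` method. It mirrors the test linked above, but to stay runnable outside JUnit and without DynamoDB Local it replaces the stream iterator with two hand-built `CustomerReview` instances; that substitution, the class name, and the sample values are assumptions for illustration rather than part of the test.

``` java
import com.amazon.ion.IonSystem;
import com.amazon.ion.system.IonSystemBuilder;
import org.partiql.lang.CompilerPipeline;
import org.partiql.lang.eval.Bindings;
import org.partiql.lang.eval.EvaluationSession;
import org.partiql.lang.eval.ExprValue;
import org.partiql.lang.eval.Expression;
import org.partiql.tutorials.ddb.streams.CustomerReview;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class QueryFlowSketch {

    public static void main(String[] args) {
        IonSystem ion = IonSystemBuilder.standard().build();
        CompilerPipeline pipeline = CompilerPipeline.standard(ion);

        // Steps 1-2 stand-in: instead of reading images off the DynamoDB Stream,
        // fabricate two reviews and turn them into ExprValues via asExprValue().
        List<CustomerReview> reviews = Arrays.asList(
                new CustomerReview("1", "r1", "title one", 5, 6, 7, true, "great"),
                new CustomerReview("2", "r2", "title two", 3, 0, 1, false, "so-so"));
        List<ExprValue> exprVals = reviews.stream()
                .map(cr -> cr.asExprValue(pipeline.getValueFactory()))
                .collect(Collectors.toList());

        // Steps 3-4: define the PartiQL query as a string and compile it.
        Expression expr = pipeline.compile(
                "SELECT s.customer_id, s.star_rating FROM ddbstream AS s WHERE s.star_rating = 5");

        // Steps 5-6: wrap the values into a PartiQL list and bind it to the name `ddbstream`.
        Map<String, ExprValue> globals = new HashMap<>();
        globals.put("ddbstream", pipeline.getValueFactory().newList(exprVals));

        // Steps 7-8: create a session carrying the global bindings, evaluate, and print.
        EvaluationSession session = EvaluationSession.builder()
                .globals(Bindings.ofMap(globals))
                .build();
        System.out.println(expr.eval(session));
    }
}
```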
--------------------------------------------------------------------------------
/ddb-streams/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'java'
3 | }
4 |
5 | group 'org.partiql.tutorials'
6 | version '1.0-SNAPSHOT'
7 |
8 | sourceCompatibility = 1.8
9 |
10 | repositories {
11 |
12 | maven {
13 | name "DynamoDB Local Release Repository - US West (Oregon) Region"
14 | url "https://s3-us-west-2.amazonaws.com/dynamodb-local/release"
15 | }
16 |
17 |
18 | mavenCentral()
19 | }
20 |
21 | dependencies {
22 | implementation platform('com.amazonaws:aws-java-sdk-bom:1.11.674')
23 |
24 | compile group: 'com.amazonaws', name: 'aws-java-sdk-dynamodb', version: '1.11.674'
25 | compile group: 'org.partiql', name: 'partiql-lang-kotlin', version: '0.1.1'
26 | compile group: 'org.jetbrains.kotlin', name: 'kotlin-stdlib', version: '1.3.60'
27 |
28 | testImplementation group: 'org.junit.jupiter', name: 'junit-jupiter', version: '5.5.2'
29 |
30 | testCompile group: 'com.amazonaws', name: 'DynamoDBLocal', version: '1.11.477'
31 | }
32 |
33 | test {
34 | useJUnitPlatform()
35 | testLogging {
36 | events "passed", "skipped", "failed"
37 | }
38 |
39 | }
40 |
--------------------------------------------------------------------------------
/ddb-streams/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/partiql/partiql-tutorials/dc317d5e90adf9a0b86ea6ffb063d66bdf838717/ddb-streams/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/ddb-streams/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Thu Nov 14 10:27:08 PST 2019
2 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-all.zip
3 | distributionBase=GRADLE_USER_HOME
4 | distributionPath=wrapper/dists
5 | zipStorePath=wrapper/dists
6 | zipStoreBase=GRADLE_USER_HOME
7 |
--------------------------------------------------------------------------------
/ddb-streams/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do 14 | ls=`ls -ld "$PRG"` 15 | link=`expr "$ls" : '.*-> \(.*\)$'` 16 | if expr "$link" : '/.*' > /dev/null; then 17 | PRG="$link" 18 | else 19 | PRG=`dirname "$PRG"`"/$link" 20 | fi 21 | done 22 | SAVED="`pwd`" 23 | cd "`dirname \"$PRG\"`/" >/dev/null 24 | APP_HOME="`pwd -P`" 25 | cd "$SAVED" >/dev/null 26 | 27 | APP_NAME="Gradle" 28 | APP_BASE_NAME=`basename "$0"` 29 | 30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 31 | DEFAULT_JVM_OPTS='"-Xmx64m"' 32 | 33 | # Use the maximum available, or set MAX_FD != -1 to use that value. 34 | MAX_FD="maximum" 35 | 36 | warn () { 37 | echo "$*" 38 | } 39 | 40 | die () { 41 | echo 42 | echo "$*" 43 | echo 44 | exit 1 45 | } 46 | 47 | # OS specific support (must be 'true' or 'false'). 48 | cygwin=false 49 | msys=false 50 | darwin=false 51 | nonstop=false 52 | case "`uname`" in 53 | CYGWIN* ) 54 | cygwin=true 55 | ;; 56 | Darwin* ) 57 | darwin=true 58 | ;; 59 | MINGW* ) 60 | msys=true 61 | ;; 62 | NONSTOP* ) 63 | nonstop=true 64 | ;; 65 | esac 66 | 67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 68 | 69 | # Determine the Java command to use to start the JVM. 70 | if [ -n "$JAVA_HOME" ] ; then 71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 72 | # IBM's JDK on AIX uses strange locations for the executables 73 | JAVACMD="$JAVA_HOME/jre/sh/java" 74 | else 75 | JAVACMD="$JAVA_HOME/bin/java" 76 | fi 77 | if [ ! -x "$JAVACMD" ] ; then 78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 79 | 80 | Please set the JAVA_HOME variable in your environment to match the 81 | location of your Java installation." 82 | fi 83 | else 84 | JAVACMD="java" 85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 86 | 87 | Please set the JAVA_HOME variable in your environment to match the 88 | location of your Java installation." 89 | fi 90 | 91 | # Increase the maximum file descriptors if we can. 92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 93 | MAX_FD_LIMIT=`ulimit -H -n` 94 | if [ $? -eq 0 ] ; then 95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 96 | MAX_FD="$MAX_FD_LIMIT" 97 | fi 98 | ulimit -n $MAX_FD 99 | if [ $? 
-ne 0 ] ; then 100 | warn "Could not set maximum file descriptor limit: $MAX_FD" 101 | fi 102 | else 103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 104 | fi 105 | fi 106 | 107 | # For Darwin, add options to specify how the application appears in the dock 108 | if $darwin; then 109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 110 | fi 111 | 112 | # For Cygwin, switch paths to Windows format before running java 113 | if $cygwin ; then 114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 116 | JAVACMD=`cygpath --unix "$JAVACMD"` 117 | 118 | # We build the pattern for arguments to be converted via cygpath 119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 120 | SEP="" 121 | for dir in $ROOTDIRSRAW ; do 122 | ROOTDIRS="$ROOTDIRS$SEP$dir" 123 | SEP="|" 124 | done 125 | OURCYGPATTERN="(^($ROOTDIRS))" 126 | # Add a user-defined pattern to the cygpath arguments 127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 129 | fi 130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 131 | i=0 132 | for arg in "$@" ; do 133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 135 | 136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 138 | else 139 | eval `echo args$i`="\"$arg\"" 140 | fi 141 | i=$((i+1)) 142 | done 143 | case $i in 144 | (0) set -- ;; 145 | (1) set -- "$args0" ;; 146 | (2) set -- "$args0" "$args1" ;; 147 | (3) set -- "$args0" "$args1" "$args2" ;; 148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 154 | esac 155 | fi 156 | 157 | # Escape application args 158 | save () { 159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 160 | echo " " 161 | } 162 | APP_ARGS=$(save "$@") 163 | 164 | # Collect all arguments for the java command, following the shell quoting and substitution rules 165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 166 | 167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 169 | cd "$(dirname "$0")" 170 | fi 171 | 172 | exec "$JAVACMD" "$@" 173 | -------------------------------------------------------------------------------- /ddb-streams/gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | set 
DIRNAME=%~dp0 12 | if "%DIRNAME%" == "" set DIRNAME=. 13 | set APP_BASE_NAME=%~n0 14 | set APP_HOME=%DIRNAME% 15 | 16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 17 | set DEFAULT_JVM_OPTS="-Xmx64m" 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | 53 | :win9xME_args 54 | @rem Slurp the command line arguments. 55 | set CMD_LINE_ARGS= 56 | set _SKIP=2 57 | 58 | :win9xME_args_slurp 59 | if "x%~1" == "x" goto execute 60 | 61 | set CMD_LINE_ARGS=%* 62 | 63 | :execute 64 | @rem Setup the command line 65 | 66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 67 | 68 | @rem Execute Gradle 69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 70 | 71 | :end 72 | @rem End local scope for the variables with windows NT shell 73 | if "%ERRORLEVEL%"=="0" goto mainEnd 74 | 75 | :fail 76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 77 | rem the _cmd.exe /c_ return code! 78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 79 | exit /b 1 80 | 81 | :mainEnd 82 | if "%OS%"=="Windows_NT" endlocal 83 | 84 | :omega 85 | -------------------------------------------------------------------------------- /ddb-streams/settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'ddbstreams' 2 | 3 | -------------------------------------------------------------------------------- /ddb-streams/src/main/java/org/partiql/tutorials/ddb/streams/CustomerReview.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams; 2 | 3 | import com.amazon.ion.IonStruct; 4 | import com.amazon.ion.IonSystem; 5 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; 6 | import com.amazonaws.services.dynamodbv2.datamodeling.*; 7 | import com.amazonaws.services.dynamodbv2.model.AttributeValue; 8 | import org.partiql.lang.eval.ExprValue; 9 | import org.partiql.lang.eval.ExprValueFactory; 10 | 11 | import java.util.Map; 12 | 13 | /** 14 | * POJO for records in CustomerReviews DynamoDB table. 
15 | * Uses the DynamoDB mapper (see: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DynamoDBMapper.html) 16 | */ 17 | @DynamoDBTable(tableName = "CustomerReviews") 18 | public class CustomerReview { 19 | 20 | String customerId; 21 | String reviewId; 22 | String productTitle; 23 | int starRating; 24 | int helpfulVotes; 25 | int totalVotes; 26 | boolean verifiedPurchase; 27 | String reviewHeading; 28 | 29 | public CustomerReview() { 30 | } // needed for DynamoDBMapper#marshallToObject() 31 | 32 | public CustomerReview(String customerId, 33 | String reviewId, 34 | String productTitle, 35 | int starRating, 36 | int helpfulVotes, 37 | int totalVotes, 38 | boolean verifiedPurchase, 39 | String reviewHeading) { 40 | this.customerId = customerId; 41 | this.reviewId = reviewId; 42 | this.productTitle = productTitle; 43 | this.starRating = starRating; 44 | this.helpfulVotes = helpfulVotes; 45 | this.totalVotes = totalVotes; 46 | this.verifiedPurchase = verifiedPurchase; 47 | this.reviewHeading = reviewHeading; 48 | } 49 | 50 | @DynamoDBHashKey(attributeName = "customer_id") 51 | public String getCustomerId() { 52 | return customerId; 53 | } 54 | 55 | public void setCustomerId(String customerId) { 56 | this.customerId = customerId; 57 | } 58 | 59 | @DynamoDBAttribute(attributeName = "review_id") 60 | public String getReviewId() { 61 | return reviewId; 62 | } 63 | 64 | public void setReviewId(String reviewId) { 65 | this.reviewId = reviewId; 66 | } 67 | 68 | @DynamoDBAttribute(attributeName = "product_title") 69 | public String getProductTitle() { 70 | return productTitle; 71 | } 72 | 73 | public void setProductTitle(String productTitle) { 74 | this.productTitle = productTitle; 75 | } 76 | 77 | @DynamoDBAttribute(attributeName = "star_rating") 78 | public int getStarRating() { 79 | return starRating; 80 | } 81 | 82 | public void setStarRating(int starRating) { 83 | this.starRating = starRating; 84 | } 85 | 86 | @DynamoDBAttribute(attributeName = "helpful_votes") 87 | public int getHelpfulVotes() { 88 | return helpfulVotes; 89 | } 90 | 91 | public void setHelpfulVotes(int helpfulVotes) { 92 | this.helpfulVotes = helpfulVotes; 93 | } 94 | 95 | @DynamoDBAttribute(attributeName = "total_votes") 96 | public int getTotalVotes() { 97 | return totalVotes; 98 | } 99 | 100 | public void setTotalVotes(int totalVotes) { 101 | this.totalVotes = totalVotes; 102 | } 103 | 104 | @DynamoDBAttribute(attributeName = "verified_purchase") 105 | public boolean isVerifiedPurchase() { 106 | return verifiedPurchase; 107 | } 108 | 109 | public void setVerifiedPurchase(boolean verifiedPurchase) { 110 | this.verifiedPurchase = verifiedPurchase; 111 | } 112 | 113 | @DynamoDBAttribute(attributeName = "review_heading") 114 | public String getReviewHeading() { 115 | return reviewHeading; 116 | } 117 | 118 | public void setReviewHeading(String reviewHeading) { 119 | this.reviewHeading = reviewHeading; 120 | } 121 | 122 | public void save(AmazonDynamoDB addb) { 123 | DynamoDBMapper mapper = new DynamoDBMapper(addb); 124 | mapper.save(this); 125 | } 126 | 127 | @Override 128 | public String toString() { 129 | return "CustomerReviews{" + 130 | "customerId='" + customerId + '\'' + 131 | ", reviewId='" + reviewId + '\'' + 132 | ", productTitle='" + productTitle + '\'' + 133 | ", starRating=" + starRating + 134 | ", helpfulVotes=" + helpfulVotes + 135 | ", totalVotes=" + totalVotes + 136 | ", verifiedPurchase=" + verifiedPurchase + 137 | ", reviewHeading='" + reviewHeading + '\'' + 138 | '}'; 139 | } 140 | 141 | /** 142 
| * Given a map of names to DynamoDB attribute values and a DynamoDB client, 143 | * use the mapper to return an instance of CustomerReview. 144 | * 145 | * @param itemAttributes Map of DynamoDB key to DynamoDB attribute value 146 | * @param mapper DynamoDB mapper 147 | * @return populated instance of CustomerReview 148 | */ 149 | public static CustomerReview marshallIntoObject(Map itemAttributes, DynamoDBMapper mapper) { 150 | return mapper.marshallIntoObject(CustomerReview.class, itemAttributes); 151 | } 152 | 153 | /** 154 | * Given a PartiQL Expression Value Factory create the appropriate PartiQL value for this instance. 155 | * 156 | * @param evf PartiQL expression value factory 157 | * @return PartiQL value corresponding to this instance. 158 | */ 159 | public ExprValue asExprValue(ExprValueFactory evf) { 160 | IonSystem ion = evf.getIon(); 161 | IonStruct result = ion.newEmptyStruct(); 162 | result.add("customer_id", ion.newString(getCustomerId())); 163 | result.add("review_id", ion.newString(getReviewId())); 164 | result.add("product_title", ion.newString(getProductTitle())); 165 | result.add("star_rating", ion.newInt(getStarRating())); 166 | result.add("helpful_votes", ion.newInt(getHelpfulVotes())); 167 | result.add("total_votes", ion.newInt(getTotalVotes())); 168 | result.add("verified_purchase", ion.newBool(isVerifiedPurchase())); 169 | result.add("review_heading", ion.newString(getReviewHeading())); 170 | return evf.newFromIonValue(result); 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /ddb-streams/src/test/java/org/partiql/tutorials/ddb/streams/AbstractCustomerReviews.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams; 2 | 3 | import com.amazon.ion.IonSystem; 4 | import com.amazon.ion.system.IonSystemBuilder; 5 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDB; 6 | import com.amazonaws.services.dynamodbv2.AmazonDynamoDBStreams; 7 | import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper; 8 | import com.amazonaws.services.dynamodbv2.document.DynamoDB; 9 | import com.amazonaws.services.dynamodbv2.local.embedded.DynamoDBEmbedded; 10 | import com.amazonaws.services.dynamodbv2.local.shared.access.AmazonDynamoDBLocal; 11 | import com.amazonaws.services.dynamodbv2.model.*; 12 | import com.amazonaws.services.dynamodbv2.util.TableUtils; 13 | import org.junit.jupiter.api.AfterAll; 14 | import org.junit.jupiter.api.AfterEach; 15 | import org.junit.jupiter.api.BeforeAll; 16 | import org.junit.jupiter.api.BeforeEach; 17 | import org.partiql.lang.CompilerPipeline; 18 | import org.partiql.lang.eval.ExprValue; 19 | import org.partiql.lang.eval.ExprValueFactory; 20 | import org.partiql.tutorials.ddb.streams.utils.AwsDynamoDbLocalTestUtils; 21 | import org.partiql.tutorials.ddb.streams.utils.CustomerReviewsTsvFileUtils; 22 | 23 | import java.io.IOException; 24 | import java.util.*; 25 | 26 | public class AbstractCustomerReviews { 27 | 28 | private static final IonSystem ION = IonSystemBuilder.standard().build(); 29 | protected static String CUSTOMER_REVIEWS = "CustomerReviews"; 30 | protected final CompilerPipeline pipeline = CompilerPipeline.standard(ION); 31 | protected DynamoDB ddb; 32 | protected ExprValueFactory valueFactory = pipeline.getValueFactory(); 33 | private AmazonDynamoDBLocal localDynamoDB; 34 | private AmazonDynamoDB ddbClient; 35 | private AmazonDynamoDBStreams streamsClient; 36 | private String streamArn; 37 | private 
DescribeTableResult describeTable; 38 | private DynamoDBMapper mapper; 39 | 40 | protected static void INFO(String... msg) { 41 | System.out.println("[INFO] " + String.join(" ", msg)); 42 | } 43 | 44 | 45 | @BeforeAll 46 | public void setUp() { 47 | AwsDynamoDbLocalTestUtils.initSqLite(); 48 | localDynamoDB = DynamoDBEmbedded.create(); 49 | ddbClient = localDynamoDB.amazonDynamoDB(); 50 | ddb = new DynamoDB(ddbClient); 51 | } 52 | 53 | @AfterAll 54 | public void tearDown() { 55 | ddbClient.shutdown(); 56 | } 57 | 58 | @BeforeEach 59 | public void testSetUp() { 60 | createTable(CUSTOMER_REVIEWS); 61 | describeTable = ddbClient.describeTable(CUSTOMER_REVIEWS); 62 | streamArn = describeTable.getTable().getLatestStreamArn(); 63 | INFO("ARN :", streamArn); 64 | StreamSpecification streamSpec = describeTable.getTable().getStreamSpecification(); 65 | streamsClient = localDynamoDB.amazonDynamoDBStreams(); 66 | mapper = new DynamoDBMapper(ddbClient); 67 | } 68 | 69 | @AfterEach 70 | public void testTearDown() { 71 | deleteTable(CUSTOMER_REVIEWS); 72 | } 73 | 74 | /** 75 | * Grab all records from all shards and turn them into ExprValues. 76 | * 77 | * @param viewType stream view type NEW_IMAGE or OLD_IMAGE used to select the image from within the record. 78 | * @return iterator over #newImage() from each record in the stream 79 | */ 80 | protected Iterable getRecordUpdates(StreamViewType viewType) { 81 | Deque shardsQ = getShardIds(); 82 | Iterator it = makeIterator(shardsQ, viewType); 83 | return () -> it; 84 | } 85 | 86 | protected Iterator makeIterator(Deque shardsQ, 87 | StreamViewType viewType) { 88 | if (StreamViewType.NEW_IMAGE == viewType) { 89 | return new NewImageExprValueIterator(shardsQ); 90 | } else if (StreamViewType.OLD_IMAGE == viewType) { 91 | return new OldImageExprValueIterator(shardsQ); 92 | } else { 93 | throw new IllegalStateException("Only accept NEW_IMAGE or OLD_IMAGE for view type"); 94 | } 95 | } 96 | 97 | /** 98 | * Gets all the shard Ids as a Queue. 99 | * See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Streams.LowLevel.Walkthrough.html 100 | * 101 | * @return queue of all shard Ids 102 | */ 103 | private Deque getShardIds() { 104 | // get the starting shard iterators--probably insufficient if the topology of shards change... 
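// DescribeStream returns the shard list one page at a time, so we keep following
// lastEvaluatedShardId until it comes back null; for each shard we then request a
// TRIM_HORIZON iterator so reading starts at the oldest record still available in the stream.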
105 | Deque shardIters = new ArrayDeque<>(); 106 | String lastEvaluatedShardId = null; 107 | do { 108 | DescribeStreamResult describeStreamResult = streamsClient.describeStream( 109 | new DescribeStreamRequest() 110 | .withStreamArn(streamArn) 111 | .withExclusiveStartShardId(lastEvaluatedShardId)); 112 | List shards = describeStreamResult.getStreamDescription().getShards(); 113 | 114 | for (Shard shard : shards) { 115 | String shardId = shard.getShardId(); 116 | GetShardIteratorRequest getShardIteratorRequest = new GetShardIteratorRequest() 117 | .withStreamArn(streamArn) 118 | .withShardId(shardId) 119 | .withShardIteratorType(ShardIteratorType.TRIM_HORIZON); 120 | GetShardIteratorResult getShardIteratorResult = 121 | streamsClient.getShardIterator(getShardIteratorRequest); 122 | shardIters.add(getShardIteratorResult.getShardIterator()); 123 | } 124 | 125 | lastEvaluatedShardId = describeStreamResult.getStreamDescription().getLastEvaluatedShardId(); 126 | 127 | } while (lastEvaluatedShardId != null); 128 | INFO("Found " + shardIters.size() + " starting shard iterators"); 129 | if (shardIters.isEmpty()) { 130 | throw new IllegalStateException("No stream shards"); 131 | } else { 132 | return shardIters; 133 | } 134 | } 135 | 136 | /** 137 | * Create the DynamoDB table programmatically. 138 | * See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GettingStarted.Java.01.html 139 | * @param tableName DynamoDB table name 140 | */ 141 | private void createTable(String tableName) { 142 | 143 | try { 144 | 145 | List keySchema = Collections.singletonList(new KeySchemaElement() 146 | .withAttributeName("customer_id"). 147 | withKeyType(KeyType.HASH)); 148 | 149 | List attributeDefinitions = Collections.singletonList(new AttributeDefinition() 150 | .withAttributeName("customer_id") 151 | .withAttributeType("S")); 152 | 153 | // streaming 154 | StreamSpecification streamSpec = getStreamSpecification(); 155 | 156 | CreateTableRequest request = new CreateTableRequest() 157 | .withTableName(tableName) 158 | .withKeySchema(keySchema) 159 | .withAttributeDefinitions(attributeDefinitions) 160 | .withProvisionedThroughput(new ProvisionedThroughput() 161 | .withReadCapacityUnits(10L) 162 | .withWriteCapacityUnits((long) 5)) 163 | .withStreamSpecification(streamSpec); 164 | 165 | INFO("Issuing CreateTable request for", tableName); 166 | TableUtils.createTableIfNotExists(ddbClient, request); 167 | TableUtils.waitUntilActive(ddbClient, CUSTOMER_REVIEWS); 168 | } catch (Exception e) { 169 | System.err.println("CreateTable request failed for " + tableName); 170 | System.err.println(e.getMessage()); 171 | } 172 | } 173 | 174 | protected StreamSpecification getStreamSpecification() { 175 | return new StreamSpecification() 176 | .withStreamEnabled(true) 177 | .withStreamViewType(StreamViewType.NEW_IMAGE); 178 | } 179 | 180 | protected void loadSampleData(String path) throws IOException { 181 | List customerReviews = CustomerReviewsTsvFileUtils.getCustomerReviews(path); 182 | customerReviews.stream().forEach(cr -> mapper.save(cr)); // save to dynamoDB using mapper. 
183 | } 184 | 185 | private void deleteTable(String tableName) { 186 | TableUtils.deleteTableIfExists(ddbClient, new DeleteTableRequest().withTableName(tableName)); 187 | } 188 | 189 | 190 | protected class OldImageExprValueIterator extends NewImageExprValueIterator { 191 | 192 | public OldImageExprValueIterator(Deque shardsQ) { 193 | super(shardsQ); 194 | } 195 | 196 | @Override 197 | public ExprValue next() { 198 | if (!hasNext()) { 199 | throw new NoSuchElementException("Iterator exhausted"); 200 | } 201 | 202 | // wrap the next record into a value 203 | Map oldImage = currRecords.removeFirst().getDynamodb().getOldImage(); 204 | if (oldImage == null) { 205 | return valueFactory.getNullValue(); 206 | } else { 207 | return CustomerReview.marshallIntoObject(oldImage, mapper).asExprValue(valueFactory); 208 | } 209 | } 210 | } 211 | protected class NewImageExprValueIterator implements Iterator { 212 | 213 | private final Deque shardsQ; 214 | Deque currRecords; 215 | 216 | public NewImageExprValueIterator(Deque shardsQ) { 217 | this.shardsQ = shardsQ; 218 | currRecords = new ArrayDeque<>(); 219 | } 220 | 221 | @Override 222 | public boolean hasNext() { 223 | Set emptyIters = new HashSet<>(); 224 | while (currRecords.isEmpty()) { 225 | if (emptyIters.size() == shardsQ.size()) { 226 | // all iterators previous returned nothing so we end the iterator 227 | return false; 228 | } 229 | 230 | String currShardIter = shardsQ.removeFirst(); 231 | 232 | GetRecordsResult getRecordsResult = streamsClient.getRecords(new GetRecordsRequest() 233 | .withShardIterator(currShardIter)); 234 | List records = getRecordsResult.getRecords(); 235 | INFO("Found " + records.size() + " records"); 236 | if (!records.isEmpty()) { 237 | // remove current shard iterator from the empty list as we got something 238 | emptyIters.remove(currShardIter); 239 | } 240 | currRecords.addAll(records); 241 | String nextShardIter = getRecordsResult.getNextShardIterator(); 242 | if (nextShardIter != null) { 243 | shardsQ.addLast(nextShardIter); 244 | } 245 | if (records.isEmpty()) { 246 | // note that the next iter came from an empty result 247 | emptyIters.add(nextShardIter); 248 | } 249 | if (shardsQ.isEmpty()) { 250 | // exhausted all of the iterators (closed) 251 | return false; 252 | } 253 | } 254 | 255 | return true; 256 | } 257 | 258 | @Override 259 | public ExprValue next() { 260 | if (!hasNext()) { 261 | throw new NoSuchElementException("Iterator exhausted"); 262 | } 263 | 264 | // wrap the next record into a value 265 | Map newImage = currRecords.removeFirst().getDynamodb().getNewImage(); 266 | return CustomerReview.marshallIntoObject(newImage, mapper).asExprValue(valueFactory); 267 | } 268 | } 269 | } 270 | -------------------------------------------------------------------------------- /ddb-streams/src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewAndOldImages.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams; 2 | 3 | import com.amazonaws.services.dynamodbv2.model.StreamSpecification; 4 | import com.amazonaws.services.dynamodbv2.model.StreamViewType; 5 | import com.google.common.collect.Lists; 6 | 7 | import kotlin.Pair; 8 | import org.junit.jupiter.api.Test; 9 | import org.junit.jupiter.api.TestInstance; 10 | import org.partiql.lang.eval.Bindings; 11 | import org.partiql.lang.eval.EvaluationSession; 12 | import org.partiql.lang.eval.ExprValue; 13 | import org.partiql.lang.eval.Expression; 14 | 15 | import java.io.IOException; 
16 | import java.util.*; 17 | import java.util.stream.Stream; 18 | 19 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 20 | public class CustomerReviewsNewAndOldImages extends AbstractCustomerReviews { 21 | 22 | 23 | public static final int LINE_LENGTH = 80; 24 | private static String CUSTOMER_REVIEWS_DATA = "src/test/resources/customer_reviews.txt"; 25 | private static String CUSTOMER_REVIEWS_UPDATES = "src/test/resources/customer_reviews_updates.txt"; 26 | 27 | 28 | @Test 29 | void reviewsWith5Stars() throws IOException { 30 | loadSampleData(CUSTOMER_REVIEWS_DATA); 31 | loadSampleData(CUSTOMER_REVIEWS_UPDATES); 32 | Iterable newImagesExprVals = getRecordUpdates(StreamViewType.NEW_IMAGE); 33 | Iterable oldImagesExprVals = getRecordUpdates(StreamViewType.OLD_IMAGE); 34 | ArrayList lnew = Lists.newArrayList(newImagesExprVals); // JOIN needs to re-iterate the list, iterators are read once 35 | ArrayList lold = Lists.newArrayList(oldImagesExprVals); 36 | 37 | 38 | // PartiQL query 39 | String q1 = "SELECT o.customer_id AS id FROM oldImages AS o"; 40 | String q1_filterNulls = "SELECT o.customer_id AS id FROM oldImages AS o WHERE o.customer_id IS NOT NULL"; 41 | 42 | String q2 = "SELECT n.customer_id AS id FROM newImages AS n"; 43 | String q3 = "SELECT n.customer_id AS nid, " + 44 | "o.customer_id AS oid, " + 45 | "n.star_rating AS nstar, " + 46 | "o.star_rating AS ostar " + 47 | "FROM newImages AS n JOIN oldImages AS o ON o.customer_id = n.customer_id " + 48 | "WHERE n.star_rating > o.star_rating"; 49 | 50 | List queries = Lists.newArrayList(q1, q1_filterNulls, q2, q3); 51 | 52 | // Compile the queries 53 | Stream> compiledQueries = 54 | queries.stream().map(q -> new Pair<>(q, pipeline.compile(q))); 55 | 56 | // Use the iterable to create a PartiQL collection (value) 57 | // What we would typically refer to as the table in a DB 58 | ExprValue oldImagesPartiQL = valueFactory.newList(lold); 59 | ExprValue newImagesPartiQL = valueFactory.newList(lnew); 60 | 61 | // globals can be thought of as the DB's catalogue 62 | Map globals = new HashMap<>(); 63 | globals.put("newImages", newImagesPartiQL); 64 | globals.put("oldImages", oldImagesPartiQL); 65 | 66 | final EvaluationSession session = EvaluationSession.builder() 67 | .globals(Bindings.ofMap(globals)) 68 | .build(); 69 | 70 | 71 | compiledQueries.forEach(cq -> { 72 | System.out.println(cq.component1() + "\n\t => \n" + cq.component2().eval(session)); 73 | printHL(); 74 | }); 75 | } 76 | 77 | private void printHL() { 78 | for(int i = 0; i < LINE_LENGTH; i++) System.out.print("-"); 79 | System.out.println(); 80 | } 81 | 82 | @Override 83 | protected StreamSpecification getStreamSpecification() { 84 | return new StreamSpecification() 85 | .withStreamEnabled(true) 86 | .withStreamViewType(StreamViewType.NEW_AND_OLD_IMAGES); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /ddb-streams/src/test/java/org/partiql/tutorials/ddb/streams/CustomerReviewsNewImageUpdate.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams; 2 | 3 | import com.amazonaws.services.dynamodbv2.document.Item; 4 | import com.amazonaws.services.dynamodbv2.document.ItemCollection; 5 | import com.amazonaws.services.dynamodbv2.document.ScanOutcome; 6 | import com.amazonaws.services.dynamodbv2.document.Table; 7 | import com.amazonaws.services.dynamodbv2.document.spec.ScanSpec; 8 | import com.amazonaws.services.dynamodbv2.document.utils.ValueMap; 9 | import 
com.amazonaws.services.dynamodbv2.model.StreamViewType; 10 | import org.junit.jupiter.api.Test; 11 | import org.junit.jupiter.api.TestInstance; 12 | import org.partiql.lang.eval.Bindings; 13 | import org.partiql.lang.eval.EvaluationSession; 14 | import org.partiql.lang.eval.ExprValue; 15 | import org.partiql.lang.eval.Expression; 16 | 17 | import java.io.IOException; 18 | import java.util.HashMap; 19 | import java.util.Iterator; 20 | import java.util.Map; 21 | 22 | 23 | @TestInstance(TestInstance.Lifecycle.PER_CLASS) 24 | class CustomerReviewsNewImageUpdate extends AbstractCustomerReviews { 25 | 26 | private static String CUSTOMER_REVIEWS_DATA = "src/test/resources/customer_reviews.txt"; 27 | 28 | 29 | @Test 30 | void reviewsWith5Stars() throws IOException { 31 | loadSampleData(CUSTOMER_REVIEWS_DATA); 32 | Iterable exprVals = getRecordUpdates(StreamViewType.NEW_IMAGE); 33 | 34 | // PartiQL query 35 | String partiQLQuery = 36 | "SELECT s.customer_id, s.star_rating " + 37 | "FROM ddbstream AS s " + 38 | "WHERE s.star_rating = 5"; 39 | 40 | // Compile the query 41 | Expression expr = pipeline.compile(partiQLQuery); 42 | 43 | // Use the iterable to create a PartiQL collection (value) 44 | // What we would typically refer to as the table in a DB 45 | ExprValue partiQLStream = valueFactory.newList(exprVals); 46 | 47 | // globals can be thought of as the DB's catalogue 48 | Map globals = new HashMap<>(); 49 | globals.put("ddbstream", partiQLStream); // ddbstream maps to the partiQL stream 50 | 51 | final EvaluationSession session = EvaluationSession.builder() 52 | .globals(Bindings.ofMap(globals)) 53 | .build(); 54 | 55 | INFO("PartiQL query result is : " + expr.eval(session)); // evaluate the query! 56 | 57 | // DDB query 58 | Table table = ddb.getTable(CUSTOMER_REVIEWS); 59 | ScanSpec scanSpec = new ScanSpec() 60 | .withProjectionExpression("customer_id, star_rating") 61 | .withFilterExpression("star_rating = :stars") 62 | .withValueMap(new ValueMap().withNumber(":stars", 5)); 63 | 64 | try { 65 | ItemCollection items = table.scan(scanSpec); 66 | 67 | Iterator iter = items.iterator(); 68 | while (iter.hasNext()) { 69 | Item item = iter.next(); 70 | INFO(item.toJSON()); 71 | } 72 | 73 | } catch (Exception e) { 74 | System.err.println("Unable to scan the table:"); 75 | System.err.println(e.getMessage()); 76 | } 77 | } 78 | 79 | 80 | } -------------------------------------------------------------------------------- /ddb-streams/src/test/java/org/partiql/tutorials/ddb/streams/utils/AwsDynamoDbLocalTestUtils.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams.utils; 2 | 3 | import com.google.common.base.Splitter; 4 | import com.google.common.base.Supplier; 5 | import com.google.common.collect.Lists; 6 | 7 | import java.io.File; 8 | import java.util.ArrayList; 9 | import java.util.List; 10 | import java.util.Locale; 11 | 12 | /** 13 | * Helper class for initializing AWS DynamoDB to run with sqlite4java for local testing. 14 | * 15 | * Copied from: https://github.com/redskap/aws-dynamodb-java-example-local-testing 16 | */ 17 | public class AwsDynamoDbLocalTestUtils { 18 | 19 | private static final String BASE_LIBRARY_NAME = "sqlite4java"; 20 | 21 | /** 22 | * Static helper class. 23 | */ 24 | private AwsDynamoDbLocalTestUtils() { 25 | } 26 | 27 | /** 28 | * Sets the sqlite4java library path system parameter if it is not set already. 
29 | */ 30 | public static void initSqLite() { 31 | initSqLite(() -> { 32 | final List<String> classPath = getClassPathList(System.getProperty("java.class.path"), File.pathSeparator); 33 | 34 | return getLibPath(System.getProperty("os.name"), System.getProperty("java.runtime.name"), 35 | System.getProperty("os.arch"), classPath); 36 | }); 37 | } 38 | 39 | /** 40 | * Sets the sqlite4java library path system parameter if it is not set already. 41 | * 42 | * @param libPathSupplier Calculates the lib path for sqlite4java. 43 | */ 44 | public static void initSqLite(Supplier<String> libPathSupplier) { 45 | if (System.getProperty("sqlite4java.library.path") == null) { 46 | System.setProperty("sqlite4java.library.path", libPathSupplier.get()); 47 | } 48 | } 49 | 50 | /** 51 | * Calculates the possible library names for finding the proper sqlite4java native library and returns the directory with the most specific matching library. 52 | * 53 | * @param osName The value of "os.name" system property (System.getProperty("os.name")). 54 | * @param runtimeName The value of "java.runtime.name" system property (System.getProperty("java.runtime.name")). 55 | * @param osArch The value of "os.arch" system property (System.getProperty("os.arch")). 56 | * @param classPath The classpath split into strings by path separator. Value of "java.class.path" system property 57 | * (System.getProperty("java.class.path")) split by File.pathSeparator. 58 | * @return The directory containing the most specific matching sqlite4java native library. 59 | */ 60 | public static String getLibPath(final String osName, final String runtimeName, final String osArch, final List<String> classPath) { 61 | final String os = getOs(osName, runtimeName); 62 | final List<String> libNames = getLibNames(os, getArch(os, osArch)); 63 | 64 | for (final String libName : libNames) { 65 | for (final String classPathLib : classPath) { 66 | if (classPathLib.contains(libName)) { 67 | return new File(classPathLib).getParent(); 68 | } 69 | } 70 | } 71 | 72 | throw new IllegalStateException("SQLite library \"" + libNames + "\" is missing from classpath"); 73 | } 74 | 75 | /** 76 | * Calculates the possible library names for finding the proper sqlite4java native library. 77 | * 78 | * Based on the internal calculation of the sqlite4java wrapper's Internal class. 82 | * 83 | * @param os Operating System Name used by sqlite4java to get native library. 84 | * @param arch Operating System Architecture used by sqlite4java to get native library. 85 | * @return Possible library names used by sqlite4java to get native library. 86 | */ 87 | public static List<String> getLibNames(final String os, final String arch) { 88 | List<String> result = new ArrayList<>(); 89 | 90 | final String base = BASE_LIBRARY_NAME + "-" + os; 91 | 92 | result.add(base + "-" + arch); 93 | 94 | if (arch.equals("x86_64") || arch.equals("x64")) { 95 | result.add(base + "-amd64"); 96 | } else if (arch.equals("x86")) { 97 | result.add(base + "-i386"); 98 | } else if (arch.equals("i386")) { 99 | result.add(base + "-x86"); 100 | } else if (arch.startsWith("arm") && arch.length() > 3) { 101 | if (arch.length() > 5 && arch.startsWith("armv") && Character.isDigit(arch.charAt(4))) { 102 | result.add(base + "-" + arch.substring(0, 5)); 103 | } 104 | result.add(base + "-arm"); 105 | } 106 | 107 | result.add(base); 108 | result.add(BASE_LIBRARY_NAME); 109 | 110 | return result; 111 | } 112 | 113 | /** 114 | * Calculates the Operating System Architecture for finding the proper sqlite4java native library. 115 | * 116 | * Based on the internal calculation of the sqlite4java wrapper's Internal class.
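* <p> * For example, {@code getArch("win32", "amd64")} returns {@code "x64"}, while any other non-null architecture string is returned lower-cased (illustrative note; see the mapping in the method body below).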
120 | * 121 | * @param os Operating System Name used by sqlite4java to get native library. 122 | * @param osArch The value of "os.arch" system property (System.getProperty("os.arch")). 123 | * @return Operating System Architecture used by sqlite4java to get native library. 124 | */ 125 | public static String getArch(final String os, final String osArch) { 126 | String result; 127 | 128 | if (osArch == null) { 129 | result = "x86"; 130 | } else { 131 | final String lowerCaseOsArch = osArch.toLowerCase(Locale.US); 132 | result = lowerCaseOsArch; 133 | if ("win32".equals(os) && "amd64".equals(lowerCaseOsArch)) { 134 | result = "x64"; 135 | } 136 | } 137 | 138 | return result; 139 | } 140 | 141 | /** 142 | * Calculates the Operating System Name for finding the proper sqlite4java native library. 143 | * 144 | * Based on the internal calculation of the sqlite4java wrapper's Internal class. 148 | * 149 | * @param osName The value of "os.name" system property (System.getProperty("os.name")). 150 | * @param runtimeName The value of "java.runtime.name" system property (System.getProperty("java.runtime.name")). 151 | * @return Operating System Name used by sqlite4java to get native library. 152 | */ 153 | public static String getOs(final String osName, final String runtimeName) { 154 | 155 | String result; 156 | if (osName == null) { 157 | result = "linux"; 158 | } else { 159 | final String lowerCaseOsName = osName.toLowerCase(Locale.US); 160 | if (lowerCaseOsName.startsWith("mac") || lowerCaseOsName.startsWith("darwin") || lowerCaseOsName.startsWith("os x")) { 161 | result = "osx"; 162 | } else if (lowerCaseOsName.startsWith("windows")) { 163 | result = "win32"; 164 | } else { 165 | if (runtimeName != null && runtimeName.toLowerCase(Locale.US).contains("android")) { 166 | result = "android"; 167 | } else { 168 | result = "linux"; 169 | } 170 | } 171 | } 172 | 173 | return result; 174 | } 175 | 176 | /** 177 | * Splits the classpath string by the path separator value. 178 | * 179 | * @param classPath Value of "java.class.path" system property (System.getProperty("java.class.path")). 180 | * @param pathSeparator Value of path separator (File.pathSeparator). 181 | * @return The list of classpath elements. 182 | */ 183 | public static List<String> getClassPathList(final String classPath, final String pathSeparator) { 184 | return Lists.newArrayList(Splitter.on(pathSeparator).split(classPath)); 185 | } 186 | 187 | } -------------------------------------------------------------------------------- /ddb-streams/src/test/java/org/partiql/tutorials/ddb/streams/utils/CustomerReviewsTsvFileUtils.java: -------------------------------------------------------------------------------- 1 | package org.partiql.tutorials.ddb.streams.utils; 2 | 3 | 4 | import org.junit.jupiter.api.Test; 5 | import org.partiql.tutorials.ddb.streams.CustomerReview; 6 | 7 | import java.io.IOException; 8 | import java.nio.file.Files; 9 | import java.nio.file.Paths; 10 | import java.util.ArrayList; 11 | import java.util.List; 12 | 13 | /** 14 | * Helper class to read customer review data from a TSV file.
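* <p> * Illustrative usage (a sketch; the path is the sample file bundled under src/test/resources): {@code List<CustomerReview> reviews = CustomerReviewsTsvFileUtils.getCustomerReviews("src/test/resources/customer_reviews.txt");}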
15 | */ 16 | public class CustomerReviewsTsvFileUtils { 17 | 18 | private static String TSV_FILE_PATH = "src/test/resources/customer_reviews.txt"; 19 | 20 | 21 | public static List<CustomerReview> getCustomerReviews(String path) throws IOException { 22 | List<CustomerReview> customerReviews = new ArrayList<>(); 23 | 24 | Files.lines(Paths.get(path)).forEach( 25 | line -> { 26 | String[] tsvRow = line.split("\t"); 27 | customerReviews.add(new CustomerReview(tsvRow[0], 28 | tsvRow[1], 29 | tsvRow[2], 30 | Integer.valueOf(tsvRow[3]), 31 | Integer.valueOf(tsvRow[4]), 32 | Integer.valueOf(tsvRow[5]), 33 | parseYorN(tsvRow[6]), 34 | tsvRow[7])); 35 | }); 36 | return customerReviews; 37 | } 38 | 39 | 40 | 41 | private static boolean parseYorN(String s) { 42 | String yOrN = s.trim(); 43 | return ("Y".equals(yOrN) || "y".equals(yOrN)); 44 | } 45 | 46 | @Test 47 | public void readTsv() throws IOException { 48 | List<CustomerReview> customerReviews = getCustomerReviews(TSV_FILE_PATH); 49 | System.out.println(customerReviews.size()); 50 | System.out.println(customerReviews); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /ddb-streams/src/test/resources/README.md: -------------------------------------------------------------------------------- 1 | # Sample Data for Testing 2 | 3 | This folder contains data used for testing. 4 | 5 | ## Customer Reviews (`customer_reviews.txt`) 6 | 7 | The file contains customer review data obtained from the [Registry of Open 8 | Data on AWS](https://registry.opendata.aws/amazon-reviews/). The data 9 | has been modified: we removed some columns and some rows. Specifically, 10 | the columns in [customer_reviews.txt](customer_reviews.txt) are 11 | 12 | 13 | 1. `customer_id` 14 | 1. `review_id` 15 | 1. `product_title` 16 | 1. `star_rating` 17 | 1. `helpful_votes` 18 | 1. `total_votes` 19 | 1. `verified_purchase` 20 | 1. `review_headline` 21 | 22 | We have also removed 23 | 24 | * the first line (the header line) of the original TSV file 25 | * some rows, to decrease the size of the data set 26 | 27 | ## Customer Reviews Updates (`customer_reviews_updates.txt`) 28 | 29 | This is a file we created. It contains lines similar to those in `customer_reviews.txt`, but with some values altered. 30 | We use this file as an example of updates to existing records in our DynamoDB table. -------------------------------------------------------------------------------- /ddb-streams/src/test/resources/customer_reviews.txt: -------------------------------------------------------------------------------- 1 | 16199106 R203HPW78Z7N4K FGGF3032MW Gallery Series 30" Wide Freestanding Gas Range 5 Sealed Burners Easy Temperature Probe Express-Select Controls One-Touch Self Clean: 5 0 0 Y If you need a new stove, this is a winner.
2 | 16374060 R2EAIGVLEALSP3 Best Hand Clothes Wringer 5 1 1 Y Five Stars 3 | 15322085 R1K1CD73HHLILA Supco SET184 Thermal Cutoff Kit 5 0 0 Y Fast Shipping 4 | 32004835 R2KZBMOFRMYOPO Midea WHS-160RB1 Compact Single Reversible Door Refrigerator and Freezer, 4.4 Cubic Feet, Black 5 1 1 Y Five Stars 5 | 25414497 R6BIZOZY6UD01 Avalon Bay Portable Ice Maker 5 0 0 Y Five Stars 6 | 36311751 R1MCXZFNF8E7Y0 Danby Freestanding Wine Cooler 1 0 0 Y Piece of Junk 7 | 30920961 R3EMB3E3ODR6BW Avanti 110-Volt Automatic Portable Compact Dryer with Stainless Drum and See-Thru Window 5 2 2 Y Works awesome for apt size 110 dryer 8 | 52491265 RJTONVTTOPJ5S Danby products 5 0 0 Y Five Stars 9 | 48166169 R21U5QZ2CQECUM 3 Pack Tier1 MSWF GE SmartWater Replacement Refrigerator Water Filter 4 0 0 Y Four Stars 10 | 50394924 RL2BBC51H89DH True TSSU-60-16 60" 2 Door Refrigerated Sandwich Prep Table | (16) 1/6 Pan Capacity 4 0 0 Y but has poor insulation in the top -------------------------------------------------------------------------------- /ddb-streams/src/test/resources/customer_reviews_updates.txt: -------------------------------------------------------------------------------- 1 | 16374060 R2EAIGVLEALSP3 Best Hand Clothes Wringer 4 1 3 Y Five Stars 2 | 36311751 R1MCXZFNF8E7Y0 Danby Freestanding Wine Cooler 4 0 3 Y Piece of Junk --------------------------------------------------------------------------------