├── .gitignore
├── .travis.yml
├── LICENSE
├── LSQL_JDBC.jpg
├── README.md
├── build.gradle
├── doc
│   └── adr
│       └── 0001-use-the-Value-projection-only.md
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── settings.gradle
└── src
    ├── main
    │   ├── kotlin
    │   │   └── io
    │   │       └── lenses
    │   │           └── jdbc4
    │   │               ├── AvroSchemas.kt
    │   │               ├── BaseResultSet.kt
    │   │               ├── Constants.kt
    │   │               ├── Handler.kt
    │   │               ├── IWrapper.kt
    │   │               ├── JacksonSupport.kt
    │   │               ├── LConnection.kt
    │   │               ├── LDatabaseMetaData.kt
    │   │               ├── LensesDriver.kt
    │   │               ├── Schemas.kt
    │   │               ├── TypeInfo.kt
    │   │               ├── Utils.kt
    │   │               ├── Versions.kt
    │   │               ├── avroExtensions.kt
    │   │               ├── client
    │   │               │   ├── AuthenticationException.kt
    │   │               │   ├── Credentials.kt
    │   │               │   ├── JdbcRequestMessage.kt
    │   │               │   └── LensesClient.kt
    │   │               ├── mappers
    │   │               │   ├── SelectFieldsMapper.kt
    │   │               │   ├── ShowTableTypesMapper.kt
    │   │               │   └── ShowTablesMapper.kt
    │   │               ├── normalizeRecord.kt
    │   │               ├── resultset
    │   │               │   ├── AbstractResultSet.kt
    │   │               │   ├── AvroSchemaResultSetMetaData.kt
    │   │               │   ├── EmptyResultSetMetaData.kt
    │   │               │   ├── EmptyRow.kt
    │   │               │   ├── ImmutableResultSet.kt
    │   │               │   ├── ListResultSet.kt
    │   │               │   ├── PullForwardOnlyResultSet.kt
    │   │               │   ├── ResultSetIterator.kt
    │   │               │   ├── RowResultSet.kt
    │   │               │   ├── UnsupportedTypesResultSet.kt
    │   │               │   ├── WebSocketResultSet.kt
    │   │               │   ├── filter.kt
    │   │               │   └── map.kt
    │   │               ├── row
    │   │               │   ├── ConvertingRow.kt
    │   │               │   ├── LongRowId.kt
    │   │               │   ├── Row.kt
    │   │               │   └── rows.kt
    │   │               ├── statements
    │   │               │   ├── LStatement.kt
    │   │               │   ├── ReadOnlyStatement.kt
    │   │               │   ├── SelectPreparedStatement.kt
    │   │               │   └── statements.kt
    │   │               └── util
    │   │                   └── Logging.kt
    │   └── resources
    │       └── lsql.versions
    └── test
        ├── kotlin
        │   └── io
        │       └── lenses
        │           └── jdbc4
        │               ├── AvroSchemaResultSetMetaDataTest.kt
        │               ├── BatchInsertTest.kt
        │               ├── BatchNestedInsertStressTest.kt
        │               ├── FrameToRecordTest.kt
        │               ├── JdbcRequestMessageTest.kt
        │               ├── LDatabaseMetaDataTest.kt
        │               ├── LStatementTest.kt
        │               ├── LensesDriverTest.kt
        │               ├── NormalizeRecordTest.kt
        │               ├── PrecisionQueryTest.kt
        │               ├── PreparedInsertTest.kt
        │               ├── ProducerSetup.kt
        │               ├── RestClientTest.kt
        │               ├── ScaleQueryTest.kt
        │               ├── data
        │               │   ├── CCData.kt
        │               │   ├── EquitiesData.kt
        │               │   ├── LocationData.kt
        │               │   ├── MovieData.kt
        │               │   └── Samples.java
        │               ├── queries
        │               │   ├── CreateTableTest.kt
        │               │   ├── DeleteTest.kt
        │               │   ├── DescribeTableTest.kt
        │               │   ├── InsertTest.kt
        │               │   ├── PollTopicViaJdbcTest.kt
        │               │   ├── SelectTest.kt
        │               │   ├── ShowFunctionsTest.kt
        │               │   ├── ShowTablesTest.kt
        │               │   ├── ShowVirtualTablesTest.kt
        │               │   ├── SingleFieldSchemaQueryTest.kt
        │               │   ├── SynonymTest.kt
        │               │   ├── TruncateTest.kt
        │               │   └── ViewTest.kt
        │               └── resultset
        │                   ├── ConvertingRowTest.kt
        │                   └── ListResultSetTest.kt
        └── resources
            ├── jdbc_sql_payments.json
            ├── keystore.jks
            ├── log4j2.xml
            ├── messages.json
            └── topics.json
/.gitignore:
--------------------------------------------------------------------------------
1 | *.class
2 | *.log
3 | .idea
4 | *.iml
5 | pom.xml.versionsBackup
6 |
7 | # Scala-IDE specific
8 | .scala_dependencies
9 | .classpath
10 | .project
11 | .settings
12 | .gradle
13 | .vscode
14 | bin/
15 | build/
16 | out/
17 | .kotlintest
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 |
3 | language: java
4 |
5 | services:
6 | - docker
7 |
8 | cache:
9 | directories:
10 | - $HOME/.m2
11 | - $HOME/.gradle
12 |
13 | before_install:
14 | - docker pull landoop/kafka-lenses-dev:2.0
15 |
16 | install:
17 | - docker run -d -e ADV_HOST=127.0.0.1 -e EULA="https://dl.lenses.stream/d/?id=2d3c06ae-5b98-47e4-bb47-233b01f89621" --rm -p 3030:3030 -p 9092:9092 -p 2181:2181 -p 8081:8081 -p 9581:9581 -p 9582:9582 -p 9584:9584 -p 9585:9585 landoop/kafka-lenses-dev:2.0
18 |
19 | before_script:
20 | - sleep 45
21 |
22 | script: ./gradlew wrapper --gradle-version 4.6 clean test --stacktrace
--------------------------------------------------------------------------------
/LSQL_JDBC.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/lenses-jdbc/4ea41f66879c6e506b42432f6899cf6376f26957/LSQL_JDBC.jpg
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [Documentation](http://lenses.stream/dev/jdbc/index.html)
2 | [Maven Central](http://search.maven.org/#search%7Cga%7C1%7Ca%3A%22lenses-jdbc%22)
3 | [Build Status](https://travis-ci.org/Landoop/lenses-jdbc)
4 |
5 | # LSQL JDBC Library
6 |
7 | ![alt text][logo]
8 |
9 | [logo]: https://github.com/Landoop/lenses-jdbc/blob/master/LSQL_JDBC.jpg "LENSES JDBC for Apache Kafka"
10 |
11 |
12 | A JDBC 4.0 compliant driver for [Lenses](https://www.lenses.io/) that allows any application using the JDBC interface to communicate with Apache Kafka via the Lenses platform.
13 |
14 | Users of this library can:
15 |
16 | * Select from topics
17 | * Insert into topics
18 | * Use prepared statements
19 | * Use batched inserts
20 | * Fetch metadata around topics
21 | * Fetch metadata around messages
22 |
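The snippet below is a minimal usage sketch showing these capabilities through the plain JDBC API. The URL prefix (`jdbc:lenses:kafka:`) and the `user`/`password` properties come from this driver's source; the endpoint, credentials and topic name (`mytopic`) are placeholders to substitute with your own.

```kotlin
import java.sql.DriverManager
import java.util.Properties

fun main() {
  // Loading the driver class registers it with the DriverManager.
  Class.forName("io.lenses.jdbc4.LensesDriver")

  val props = Properties().apply {
    setProperty("user", "admin")       // placeholder credentials
    setProperty("password", "admin")
  }

  // The Lenses Box from the Testing section listens on http://localhost:3030
  DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", props).use { conn ->
    conn.createStatement().use { stmt ->
      // `mytopic` is a placeholder Kafka topic name
      stmt.executeQuery("SELECT * FROM mytopic").use { rs ->
        while (rs.next()) {
          println(rs.getString(1))
        }
      }
    }
  }
}
```

Connections, statements and result sets are standard `java.sql` types, so the driver can be plugged into existing JDBC tooling.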
23 | ## Documentation
24 |
25 | Documentation can be found [here](https://docs.lenses.io/3.2/dev/jdbc/index.html).
26 |
27 | ## Download
28 |
29 | Download from [Maven Central](http://search.maven.org/#search%7Cgav%7C1%7Cg%3A%22io.lenses%22%20AND%20a%3A%22lenses-jdbc%22).
30 |
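If you consume the driver from a Gradle build, the coordinates below are assumed from this repository's `group` (`io.lenses`), `rootProject.name` (`lenses-jdbc`) and current `version` (3.0.2); check Maven Central for the latest published release. A Kotlin DSL sketch:

```kotlin
// build.gradle.kts — coordinates taken from this repository's build files
dependencies {
    implementation("io.lenses:lenses-jdbc:3.0.2")
}
```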
31 | ## Requirements
32 |
33 | 1. Java 1.8+
34 | 2. Gradle 4.9+
35 |
36 | # Building
37 |
38 | Requires Gradle 4.9+
39 |
40 | To build:
41 |
42 | ```bash
43 | ./gradlew compileKotlin
44 | ```
45 |
46 | For a fatJar:
47 |
48 | ```bash
49 | ./gradlew shadowJar
50 | ```
51 |
52 | # Testing
53 | Testing requires the Lenses Box to be running locally at http://localhost:3030.
54 |
55 | ```bash
56 | ./gradlew clean test
57 | ```
58 |
59 | # License
60 |
61 | The project is licensed under the Apache 2 license.
62 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | buildscript {
2 | ext.kotlin_version = '1.3.50'
3 | ext.kotlin_test = '3.4.0'
4 | ext.log4j_version = '2.11.2'
5 | ext.slf4j_version = '1.7.25'
6 | ext.avro_version = '1.9.0'
7 | ext.httpclient_version = '4.5.7'
8 | ext.jackson_version = '2.9.9'
9 | ext.kafka_version = '2.3.0'
10 | ext.ktorVersion = "1.2.3"
11 | repositories {
12 | maven {
13 | url "https://plugins.gradle.org/m2/"
14 | }
15 | jcenter()
16 | mavenCentral()
17 | }
18 | dependencies {
19 | classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
20 | classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.3'
21 | classpath "org.jetbrains.dokka:dokka-gradle-plugin:0.9.16"
22 | classpath 'io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.11.0'
23 | classpath 'net.researchgate:gradle-release:2.7.0'
24 | }
25 | }
26 |
27 |
28 | group 'io.lenses'
29 |
30 | apply plugin: 'com.github.johnrengelman.shadow'
31 | apply plugin: 'java'
32 | apply plugin: "kotlin"
33 |
34 | apply plugin: 'maven-publish'
35 | apply plugin: 'signing'
36 | apply plugin: 'maven'
37 | apply plugin: 'org.jetbrains.dokka'
38 | apply plugin: 'io.codearte.nexus-staging'
39 | apply plugin: 'net.researchgate.release'
40 |
41 | repositories {
42 | mavenCentral()
43 | maven {
44 | url "https://packages.confluent.io/maven"
45 | }
46 | }
47 |
48 | dependencies {
49 |
50 | compile "org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version"
51 | compile "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version"
52 | compile "org.slf4j:slf4j-api:$slf4j_version"
53 | compile "org.apache.httpcomponents:httpclient:$httpclient_version"
54 | compile "com.fasterxml.jackson.core:jackson-core:$jackson_version"
55 | compile "com.fasterxml.jackson.core:jackson-databind:$jackson_version"
56 | compile "com.fasterxml.jackson.core:jackson-annotations:$jackson_version"
57 | compile "com.fasterxml.jackson.module:jackson-module-kotlin:$jackson_version"
58 | compile "org.apache.avro:avro:$avro_version"
59 | compile 'org.glassfish.tyrus:tyrus-client:1.15'
60 | compile 'org.glassfish.tyrus:tyrus-core:1.15'
61 | compile 'org.glassfish.tyrus:tyrus-container-grizzly-client:1.15'
62 | compile "io.ktor:ktor-client-core:$ktorVersion"
63 | compile "io.ktor:ktor-client-cio:$ktorVersion"
64 | compile "io.ktor:ktor-client-jackson:$ktorVersion"
65 | compile "io.ktor:ktor-client-websockets:$ktorVersion"
66 | compile "io.ktor:ktor-client-encoding:$ktorVersion"
67 | compile "io.ktor:ktor-client-encoding-jvm:$ktorVersion"
68 | compile 'org.springframework:spring-websocket:5.1.9.RELEASE'
69 |
70 | implementation 'io.arrow-kt:arrow-core-data:0.9.0'
71 | //compile 'jakarta.websocket:jakarta.websocket-api:1.1.1'
72 |
73 | testCompile "org.apache.logging.log4j:log4j-slf4j-impl:$log4j_version"
74 | testCompile "io.kotlintest:kotlintest-runner-junit5:$kotlin_test"
75 | testCompile "org.apache.kafka:kafka-clients:$kafka_version"
76 | testCompile 'io.confluent:kafka-avro-serializer:3.2.1'
77 | testCompile 'org.nanohttpd:nanohttpd:2.3.1'
78 | }
79 |
80 | publishing {
81 | publications {
82 | mavenJava(MavenPublication) {
83 | from components.java
84 | }
85 | }
86 | }
87 |
88 | compileKotlin {
89 | kotlinOptions {
90 | jvmTarget = "1.8"
91 | }
92 | }
93 |
94 | compileTestKotlin {
95 | kotlinOptions {
96 | jvmTarget = "1.8"
97 | }
98 | }
99 |
100 | // mainClassName = "com.landoop.jdbc.Driver"
101 |
102 | jar {
103 | manifest {
104 | attributes "Main-Class": "io.lenses.jdbc4.LensesDriver"
105 | }
106 | }
107 |
108 | shadowJar {
109 | zip64 true
110 | classifier = null
111 | manifest {
112 | attributes "Main-Class": "io.lenses.jdbc4.LensesDriver"
113 | relocate 'com.fasterxml', 'shadow.com.fasterxml'
114 | relocate 'org.springframework', 'shadow.org.springframework'
115 | }
116 | }
117 |
118 | test {
119 | useJUnitPlatform()
120 | testLogging {
121 | events "PASSED", "FAILED", "SKIPPED", "STANDARD_OUT", "STANDARD_ERROR"
122 | }
123 | }
124 |
125 | dokka {
126 | includes = []
127 |
128 | linkMapping {
129 | dir = "src/main/kotlin"
130 | url = "https://github.com/Landoop/lenses-jdbc"
131 | suffix = "#L"
132 | }
133 |
134 | outputFormat = "javadoc"
135 | outputDirectory = javadoc.destinationDir
136 | }
137 |
138 | task dokkaJavadoc(type: org.jetbrains.dokka.gradle.DokkaTask, dependsOn: javadoc) {
139 | outputFormat = "javadoc"
140 | outputDirectory = javadoc.destinationDir
141 | }
142 |
143 | task packageJavadoc(type: Jar, dependsOn: dokkaJavadoc) {
144 | from javadoc.destinationDir
145 | classifier = 'javadoc'
146 | }
147 |
148 | task packageSources(type: Jar, dependsOn: 'classes') {
149 | from sourceSets.main.allSource
150 | classifier = 'sources'
151 | }
152 |
153 | task copyToLib(type: Copy) {
154 | into "$buildDir/libs"
155 | from configurations.runtime
156 | }
157 |
158 | build.dependsOn(copyToLib)
159 |
160 | artifacts {
161 | archives jar
162 | archives packageJavadoc
163 | archives packageSources
164 | }
165 |
166 | signing {
167 | required { gradle.taskGraph.hasTask("uploadArchives") }
168 | sign configurations.archives
169 | }
170 |
171 | // OSSRH publication
172 | if (project.hasProperty('release')) {
173 | uploadArchives {
174 | repositories {
175 | mavenDeployer {
176 | // POM signature
177 | beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
178 | // Target repository
179 | repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
180 | authentication(userName: ossrhUsername, password: ossrhPassword)
181 | }
182 | pom.project {
183 | name project.name
184 | description project.description
185 | packaging 'jar'
186 | url 'https://github.com/landoop/lenses-jdbc'
187 |
188 | scm {
189 | connection 'scm:git:https://github.com/landoop/lenses-jdbc.git'
190 | developerConnection 'scm:git:git@github.com:landoop/lenses-jdbc.git'
191 | url 'https://github.com/landoop/lenses-jdbc.git'
192 | }
193 |
194 | licenses {
195 | license {
196 | name 'Apache License 2.0'
197 | url 'http://www.apache.org/licenses/LICENSE-2.0.html'
198 | distribution 'repo'
199 | }
200 | }
201 |
202 | developers {
203 | developer {
204 | id = 'sksamuel'
205 | name = 'Stephen Samuel'
206 | }
207 | developer {
208 | id = 'stheppi'
209 | name = 'Stefan Bocutiu'
210 | email = 'stefan@landoop.com'
211 | }
212 | }
213 | }
214 | }
215 | }
216 | }
217 | }
218 |
--------------------------------------------------------------------------------
/doc/adr/0001-use-the-Value-projection-only.md:
--------------------------------------------------------------------------------
1 | # 1. Use only the Value projection
2 |
3 | Date: 2019-08-27
4 |
5 | ## Status
6 |
7 | Accepted
8 |
9 | ## Context
10 |
11 |
12 | A Kafka record contains, among other things, a Key and a Value. This does not map well onto a `SELECT *`
13 | over the tabular data structure that JDBC requires.
14 |
15 | The previous approach built a JDBC row by merging the data from the Key and the Value.
16 | This causes problems when retrieving a column by name or index, since the lookup relies only on the Value schema.
17 | Merging the Key and Value schemas is neither trivial nor intuitive for the user.
18 |
19 | The engine also returns additional information, including the metadata for each record.
20 |
21 | ## Decision
22 |
23 | Only consider the data returned in the Value.
24 |
25 | ## Consequences
26 |
27 | To access the Key value or its fields, the user must perform a projection that lifts the key:
28 |
29 | ```sql
30 | SELECT *, _key as key
31 | FROM curious_eyes
32 | ```
33 |
34 | The same applies to retrieving the record metadata:
35 |
36 | ```sql
37 | SELECT *, _meta.offset as _offset, _meta.partition as _partition
38 | FROM curious_eyes
39 | ```
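On the driver side these projections surface as ordinary result-set columns. A minimal sketch, assuming the aliases above (`key`, `_offset`, `_partition`) are exposed as column labels and that `conn` is an open JDBC connection obtained from the Lenses driver:

```kotlin
// Sketch only: `conn` is an open java.sql.Connection to Lenses.
conn.createStatement().use { stmt ->
  stmt.executeQuery(
    "SELECT *, _key as key, _meta.offset as _offset, _meta.partition as _partition FROM curious_eyes"
  ).use { rs ->
    while (rs.next()) {
      val key = rs.getString("key")        // the lifted record key
      val offset = rs.getLong("_offset")   // record metadata from _meta
      val partition = rs.getInt("_partition")
      println("$key @ $partition/$offset")
    }
  }
}
```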
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | kotlin.incremental=true
2 | version=3.0.2
3 |
4 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/lenses-jdbc/4ea41f66879c6e506b42432f6899cf6376f26957/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | #Wed Apr 25 00:32:08 BST 2018
2 | distributionBase=GRADLE_USER_HOME
3 | distributionPath=wrapper/dists
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 | distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-bin.zip
7 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | ##############################################################################
4 | ##
5 | ## Gradle start up script for UN*X
6 | ##
7 | ##############################################################################
8 |
9 | # Attempt to set APP_HOME
10 | # Resolve links: $0 may be a link
11 | PRG="$0"
12 | # Need this for relative symlinks.
13 | while [ -h "$PRG" ] ; do
14 | ls=`ls -ld "$PRG"`
15 | link=`expr "$ls" : '.*-> \(.*\)$'`
16 | if expr "$link" : '/.*' > /dev/null; then
17 | PRG="$link"
18 | else
19 | PRG=`dirname "$PRG"`"/$link"
20 | fi
21 | done
22 | SAVED="`pwd`"
23 | cd "`dirname \"$PRG\"`/" >/dev/null
24 | APP_HOME="`pwd -P`"
25 | cd "$SAVED" >/dev/null
26 |
27 | APP_NAME="Gradle"
28 | APP_BASE_NAME=`basename "$0"`
29 |
30 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
31 | DEFAULT_JVM_OPTS=""
32 |
33 | # Use the maximum available, or set MAX_FD != -1 to use that value.
34 | MAX_FD="maximum"
35 |
36 | warn () {
37 | echo "$*"
38 | }
39 |
40 | die () {
41 | echo
42 | echo "$*"
43 | echo
44 | exit 1
45 | }
46 |
47 | # OS specific support (must be 'true' or 'false').
48 | cygwin=false
49 | msys=false
50 | darwin=false
51 | nonstop=false
52 | case "`uname`" in
53 | CYGWIN* )
54 | cygwin=true
55 | ;;
56 | Darwin* )
57 | darwin=true
58 | ;;
59 | MINGW* )
60 | msys=true
61 | ;;
62 | NONSTOP* )
63 | nonstop=true
64 | ;;
65 | esac
66 |
67 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
68 |
69 | # Determine the Java command to use to start the JVM.
70 | if [ -n "$JAVA_HOME" ] ; then
71 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
72 | # IBM's JDK on AIX uses strange locations for the executables
73 | JAVACMD="$JAVA_HOME/jre/sh/java"
74 | else
75 | JAVACMD="$JAVA_HOME/bin/java"
76 | fi
77 | if [ ! -x "$JAVACMD" ] ; then
78 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
79 |
80 | Please set the JAVA_HOME variable in your environment to match the
81 | location of your Java installation."
82 | fi
83 | else
84 | JAVACMD="java"
85 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
86 |
87 | Please set the JAVA_HOME variable in your environment to match the
88 | location of your Java installation."
89 | fi
90 |
91 | # Increase the maximum file descriptors if we can.
92 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
93 | MAX_FD_LIMIT=`ulimit -H -n`
94 | if [ $? -eq 0 ] ; then
95 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
96 | MAX_FD="$MAX_FD_LIMIT"
97 | fi
98 | ulimit -n $MAX_FD
99 | if [ $? -ne 0 ] ; then
100 | warn "Could not set maximum file descriptor limit: $MAX_FD"
101 | fi
102 | else
103 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
104 | fi
105 | fi
106 |
107 | # For Darwin, add options to specify how the application appears in the dock
108 | if $darwin; then
109 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
110 | fi
111 |
112 | # For Cygwin, switch paths to Windows format before running java
113 | if $cygwin ; then
114 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
115 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
116 | JAVACMD=`cygpath --unix "$JAVACMD"`
117 |
118 | # We build the pattern for arguments to be converted via cygpath
119 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
120 | SEP=""
121 | for dir in $ROOTDIRSRAW ; do
122 | ROOTDIRS="$ROOTDIRS$SEP$dir"
123 | SEP="|"
124 | done
125 | OURCYGPATTERN="(^($ROOTDIRS))"
126 | # Add a user-defined pattern to the cygpath arguments
127 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
128 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
129 | fi
130 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
131 | i=0
132 | for arg in "$@" ; do
133 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
134 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
135 |
136 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
137 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
138 | else
139 | eval `echo args$i`="\"$arg\""
140 | fi
141 | i=$((i+1))
142 | done
143 | case $i in
144 | (0) set -- ;;
145 | (1) set -- "$args0" ;;
146 | (2) set -- "$args0" "$args1" ;;
147 | (3) set -- "$args0" "$args1" "$args2" ;;
148 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
149 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
150 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
151 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
152 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
153 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
154 | esac
155 | fi
156 |
157 | # Escape application args
158 | save () {
159 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
160 | echo " "
161 | }
162 | APP_ARGS=$(save "$@")
163 |
164 | # Collect all arguments for the java command, following the shell quoting and substitution rules
165 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
166 |
167 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
168 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
169 | cd "$(dirname "$0")"
170 | fi
171 |
172 | exec "$JAVACMD" "$@"
173 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @if "%DEBUG%" == "" @echo off
2 | @rem ##########################################################################
3 | @rem
4 | @rem Gradle startup script for Windows
5 | @rem
6 | @rem ##########################################################################
7 |
8 | @rem Set local scope for the variables with windows NT shell
9 | if "%OS%"=="Windows_NT" setlocal
10 |
11 | set DIRNAME=%~dp0
12 | if "%DIRNAME%" == "" set DIRNAME=.
13 | set APP_BASE_NAME=%~n0
14 | set APP_HOME=%DIRNAME%
15 |
16 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
17 | set DEFAULT_JVM_OPTS=
18 |
19 | @rem Find java.exe
20 | if defined JAVA_HOME goto findJavaFromJavaHome
21 |
22 | set JAVA_EXE=java.exe
23 | %JAVA_EXE% -version >NUL 2>&1
24 | if "%ERRORLEVEL%" == "0" goto init
25 |
26 | echo.
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
28 | echo.
29 | echo Please set the JAVA_HOME variable in your environment to match the
30 | echo location of your Java installation.
31 |
32 | goto fail
33 |
34 | :findJavaFromJavaHome
35 | set JAVA_HOME=%JAVA_HOME:"=%
36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
37 |
38 | if exist "%JAVA_EXE%" goto init
39 |
40 | echo.
41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
42 | echo.
43 | echo Please set the JAVA_HOME variable in your environment to match the
44 | echo location of your Java installation.
45 |
46 | goto fail
47 |
48 | :init
49 | @rem Get command-line arguments, handling Windows variants
50 |
51 | if not "%OS%" == "Windows_NT" goto win9xME_args
52 |
53 | :win9xME_args
54 | @rem Slurp the command line arguments.
55 | set CMD_LINE_ARGS=
56 | set _SKIP=2
57 |
58 | :win9xME_args_slurp
59 | if "x%~1" == "x" goto execute
60 |
61 | set CMD_LINE_ARGS=%*
62 |
63 | :execute
64 | @rem Setup the command line
65 |
66 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
67 |
68 | @rem Execute Gradle
69 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
70 |
71 | :end
72 | @rem End local scope for the variables with windows NT shell
73 | if "%ERRORLEVEL%"=="0" goto mainEnd
74 |
75 | :fail
76 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
77 | rem the _cmd.exe /c_ return code!
78 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
79 | exit /b 1
80 |
81 | :mainEnd
82 | if "%OS%"=="Windows_NT" endlocal
83 |
84 | :omega
85 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'lenses-jdbc'
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/AvroSchemas.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import org.apache.avro.LogicalTypes
4 | import org.apache.avro.Schema
5 | import java.sql.SQLException
6 | import java.sql.SQLFeatureNotSupportedException
7 | import java.sql.Types
8 |
9 | object AvroSchemas {
10 |
11 | fun normalizedName(schema: Schema): String {
12 | return when (schema.type) {
13 | Schema.Type.UNION -> schema.fromUnion().type.name
14 | else -> schema.type.name
15 | }
16 | }
17 |
18 | /**
19 | * @return the JVM fully qualified classname for the given avro type.
20 | */
21 | fun jvmClassName(type: Schema.Type): String {
22 | return when (type) {
23 | Schema.Type.BOOLEAN -> java.lang.Boolean::class.java.canonicalName
24 | Schema.Type.BYTES -> byteArrayOf(1)::class.java.canonicalName
25 | Schema.Type.ENUM -> java.lang.String::class.java.canonicalName
26 | Schema.Type.DOUBLE -> java.lang.Double::class.java.canonicalName
27 | Schema.Type.FLOAT -> java.lang.Float::class.java.canonicalName
28 | Schema.Type.INT -> java.lang.Integer::class.java.canonicalName
29 | Schema.Type.LONG -> java.lang.Long::class.java.canonicalName
30 | Schema.Type.STRING -> java.lang.String::class.java.canonicalName
31 | else -> throw SQLException("Unknown class name for $type")
32 | }
33 | }
34 |
35 | fun sqlType(schema: Schema): Int {
36 | return when (schema.type) {
37 | Schema.Type.ARRAY -> Types.ARRAY
38 | Schema.Type.BOOLEAN -> Types.BOOLEAN
39 | Schema.Type.BYTES ->
40 | when (schema.logicalType) {
41 | null -> Types.BINARY
42 | is LogicalTypes.Decimal -> Types.DECIMAL
43 | else -> {
44 | if (schema.logicalType.name == "uuid") Types.VARCHAR
45 | else Types.BINARY
46 | }
47 | }
48 | Schema.Type.DOUBLE -> Types.DOUBLE
49 | Schema.Type.ENUM -> Types.VARCHAR
50 | Schema.Type.FIXED -> Types.BINARY
51 | Schema.Type.FLOAT -> Types.FLOAT
52 | Schema.Type.INT ->
53 | when (schema.logicalType) {
54 | is LogicalTypes.TimeMillis -> Types.TIME
55 | is LogicalTypes.Date -> Types.DATE
56 | else -> Types.INTEGER
57 | }
58 | Schema.Type.LONG ->
59 | when (schema.logicalType) {
60 | is LogicalTypes.TimestampMillis -> Types.TIMESTAMP
61 | is LogicalTypes.TimestampMicros -> Types.TIMESTAMP
62 | is LogicalTypes.TimeMicros -> Types.TIMESTAMP
63 | else -> Types.BIGINT
64 | }
65 | Schema.Type.MAP -> Types.STRUCT
66 | Schema.Type.NULL -> Types.NULL
67 | Schema.Type.RECORD -> Types.STRUCT
68 | Schema.Type.STRING -> Types.VARCHAR
69 | Schema.Type.UNION -> io.lenses.jdbc4.AvroSchemas.sqlType(schema.fromUnion())
70 | else -> throw SQLFeatureNotSupportedException()
71 | }
72 | }
73 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/Constants.kt:
--------------------------------------------------------------------------------
1 | @file:Suppress("MayBeConstant")
2 |
3 | package io.lenses.jdbc4
4 |
5 | object Constants {
6 | val ProductName = "LSQL for Apache Kafka"
7 | val DriverName = "JDBC Driver for LSQL"
8 | val JdbcPrefix = "jdbc:lenses:kafka:"
9 | val DatabaseName = "default"
10 | val LensesTokenHeader = "X-Kafka-Lenses-Token"
11 | val BATCH_HARD_LIMIT = 1000
12 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/Handler.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import java.sql.ResultSet
4 |
5 | interface Handler {
6 | fun execute(): ResultSet
7 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/IWrapper.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import java.sql.SQLException
4 |
5 | interface IWrapper {
6 | fun _isWrapperFor(iface: Class<*>?): Boolean = iface?.isInstance(this) ?: false
7 | fun <T> _unwrap(iface: Class<T>): T {
8 | try {
9 | return iface.cast(this)
10 | } catch (cce: ClassCastException) {
11 | throw SQLException("Unable to unwrap instance as $iface")
12 | }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/JacksonSupport.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import com.fasterxml.jackson.annotation.JsonInclude
4 | import com.fasterxml.jackson.core.JsonParser
5 | import com.fasterxml.jackson.databind.DeserializationFeature
6 | import com.fasterxml.jackson.databind.ObjectMapper
7 | import com.fasterxml.jackson.module.kotlin.KotlinModule
8 | import java.io.InputStream
9 |
10 | object JacksonSupport {
11 |
12 | val mapper: ObjectMapper = ObjectMapper().apply {
13 | this.registerModule(KotlinModule())
14 | this.setSerializationInclusion(JsonInclude.Include.NON_NULL)
15 | this.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
16 | this.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false)
17 | this.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true)
18 | this.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true)
19 | this.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
20 | }
21 |
22 | fun <T> toJson(t: T): String = io.lenses.jdbc4.JacksonSupport.mapper.writeValueAsString(t)
23 |
24 | inline fun <reified T : Any> fromJson(json: String): T {
25 | return io.lenses.jdbc4.JacksonSupport.mapper.readValue(json, T::class.java)
26 | }
27 |
28 | inline fun <reified T : Any> fromJson(stream: InputStream): T {
29 | return io.lenses.jdbc4.JacksonSupport.mapper.readValue(stream, T::class.java)
30 | }
31 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/LConnection.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.lenses.jdbc4.client.LensesClient
4 | import io.lenses.jdbc4.client.Credentials
5 | import io.lenses.jdbc4.statements.LStatement
6 | import io.lenses.jdbc4.util.Logging
7 | import java.sql.Blob
8 | import java.sql.CallableStatement
9 | import java.sql.Clob
10 | import java.sql.Connection
11 | import java.sql.DatabaseMetaData
12 | import java.sql.NClob
13 | import java.sql.PreparedStatement
14 | import java.sql.ResultSet
15 | import java.sql.SQLException
16 | import java.sql.SQLFeatureNotSupportedException
17 | import java.sql.SQLWarning
18 | import java.sql.SQLXML
19 | import java.sql.Savepoint
20 | import java.sql.Statement
21 | import java.sql.Struct
22 | import java.util.*
23 | import java.util.concurrent.Executor
24 |
25 | class LConnection(private val uri: String,
26 | props: Properties) : Connection, AutoCloseable, Logging, IWrapper {
27 |
28 | private val user = props.getProperty("user") ?: throw SQLException("URI must specify username")
29 | private val password = props.getProperty("password", null) ?: throw SQLException("URI must specify password")
30 | private val weakSSL = props.getProperty("weakssl", "false")!!.toBoolean()
31 |
32 | private val urls = uri.replace(Constants.JdbcPrefix, "").split(',').apply {
33 | if (this.isEmpty())
34 | throw SQLException("URI must specify at least one REST endpoint")
35 | if (!this.all { it.startsWith("http") || it.startsWith("https") })
36 | throw SQLException("Endpoints must use http or https")
37 | logger.debug("Connection will use urls $this")
38 | }
39 |
40 | private val client = LensesClient(urls.first(), Credentials(user, password), weakSSL)
41 |
42 | override fun getHoldability(): Int = ResultSet.CLOSE_CURSORS_AT_COMMIT
43 |
44 | override fun setNetworkTimeout(executor: Executor?, milliseconds: Int) {}
45 |
46 | override fun abort(executor: Executor?) {
47 | close()
48 | }
49 |
50 | override fun getClientInfo(name: String?): String? = null
51 |
52 | override fun getClientInfo(): Properties = Properties()
53 |
54 | override fun getAutoCommit(): Boolean = false
55 |
56 | override fun setCatalog(catalog: String?) {
57 | // javadoc requires no-op if not supported
58 | }
59 |
60 | override fun getWarnings(): SQLWarning? = null
61 | override fun clearWarnings() {}
62 |
63 | override fun getCatalog(): String? = null
64 | override fun getSchema(): String? = null
65 |
66 | // timeout is ignored, and the default timeout of the client is used
67 | override fun isValid(timeout: Int): Boolean = false
68 |
69 | override fun close() {
70 | client.close()
71 | }
72 |
73 | override fun isClosed(): Boolean = client.isClosed()
74 |
75 | override fun createArrayOf(typeName: String?, elements: Array<out Any>?): java.sql.Array =
76 | throw SQLFeatureNotSupportedException()
77 |
78 | override fun setReadOnly(readOnly: Boolean) {}
79 | override fun isReadOnly(): Boolean = true
80 |
81 | override fun isWrapperFor(iface: Class<*>?): Boolean = _isWrapperFor(iface)
82 | override fun <T> unwrap(iface: Class<T>): T = _unwrap(iface)
83 |
84 | override fun nativeSQL(sql: String?): String = sql!!
85 |
86 | override fun setClientInfo(name: String?, value: String?) = throw SQLFeatureNotSupportedException()
87 | override fun setClientInfo(properties: Properties?) = throw SQLFeatureNotSupportedException()
88 |
89 | override fun createStatement(): Statement = LStatement(this, client)
90 | override fun prepareStatement(sql: String): PreparedStatement = throw SQLFeatureNotSupportedException()
91 |
92 | override fun getTypeMap(): MutableMap<String, Class<*>> = throw SQLFeatureNotSupportedException()
93 | override fun getMetaData(): DatabaseMetaData = LDatabaseMetaData(this, client, uri, user)
94 |
95 | override fun setSchema(schema: String?) {
96 | // javadoc requests noop for non-supported
97 | }
98 |
99 | override fun getNetworkTimeout(): Int = 0
100 |
101 | override fun setTypeMap(map: MutableMap<String, Class<*>>?) = throw SQLFeatureNotSupportedException()
102 |
103 | // -- unsupported prepared statement variants
104 |
105 | override fun prepareStatement(sql: String, resultSetType: Int, resultSetConcurrency: Int): PreparedStatement {
106 | if (resultSetType != ResultSet.TYPE_FORWARD_ONLY)
107 | throw SQLFeatureNotSupportedException("ResultSetType $resultSetType is not supported")
108 | if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY)
109 | throw SQLFeatureNotSupportedException("ResultSetConcurrency $resultSetConcurrency is not supported")
110 | return prepareStatement(sql)
111 | }
112 |
113 | override fun prepareStatement(sql: String,
114 | resultSetType: Int,
115 | resultSetConcurrency: Int,
116 | resultSetHoldability: Int): PreparedStatement = prepareStatement(sql,
117 | resultSetType,
118 | resultSetConcurrency)
119 |
120 | override fun prepareStatement(sql: String?,
121 | autoGeneratedKeys: Int): PreparedStatement = throw SQLFeatureNotSupportedException("Use prepareStatement(sql)")
122 |
123 | override fun prepareStatement(sql: String?,
124 | columnIndexes: IntArray?): PreparedStatement = throw SQLFeatureNotSupportedException("Use prepareStatement(sql)")
125 |
126 | override fun prepareStatement(sql: String?,
127 | columnNames: Array<out String>?): PreparedStatement = throw SQLFeatureNotSupportedException(
128 | "Use prepareStatement(sql)")
129 |
130 | override fun prepareCall(sql: String?): CallableStatement = throw SQLFeatureNotSupportedException()
131 | override fun prepareCall(sql: String?,
132 | resultSetType: Int,
133 | resultSetConcurrency: Int): CallableStatement = throw SQLFeatureNotSupportedException()
134 |
135 | override fun prepareCall(sql: String?,
136 | resultSetType: Int,
137 | resultSetConcurrency: Int,
138 | resultSetHoldability: Int): CallableStatement = throw SQLFeatureNotSupportedException()
139 |
140 | // -- unsupported create statement methods
141 |
142 | override fun createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement =
143 | throw SQLFeatureNotSupportedException("ResultSet type and ResultSet concurrency are not supported, use the createStatement() function")
144 |
145 | override fun createStatement(resultSetType: Int, resultSetConcurrency: Int, resultSetHoldability: Int): Statement =
146 | throw SQLFeatureNotSupportedException("ResultSet type and ResultSet concurrency are not supported, use the createStatement() function")
147 |
148 | // -- tx methods are unsupported
149 |
150 | override fun setTransactionIsolation(level: Int) {}
151 | override fun getTransactionIsolation(): Int = Connection.TRANSACTION_NONE
152 | override fun setAutoCommit(autoCommit: Boolean) {}
153 | override fun rollback() = throw SQLFeatureNotSupportedException()
154 | override fun rollback(savepoint: Savepoint?) = throw SQLFeatureNotSupportedException()
155 | override fun commit() = throw SQLFeatureNotSupportedException()
156 | override fun setSavepoint(): Savepoint = throw SQLFeatureNotSupportedException()
157 | override fun setSavepoint(name: String?): Savepoint = throw SQLFeatureNotSupportedException()
158 | override fun releaseSavepoint(savepoint: Savepoint?) = throw SQLFeatureNotSupportedException()
159 |
160 | // -- unsupported methods
161 |
162 | override fun createClob(): Clob = throw SQLFeatureNotSupportedException()
163 | override fun createNClob(): NClob = throw SQLFeatureNotSupportedException()
164 | override fun createBlob(): Blob = throw SQLFeatureNotSupportedException()
165 | override fun createSQLXML(): SQLXML = throw SQLFeatureNotSupportedException()
166 | override fun createStruct(typeName: String?,
167 | attributes: Array<out Any>?): Struct = throw SQLFeatureNotSupportedException()
168 |
169 | override fun setHoldability(holdability: Int) = throw SQLFeatureNotSupportedException()
170 |
171 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/LensesDriver.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.lenses.jdbc4.util.Logging
4 | import java.sql.Connection
5 | import java.sql.Driver
6 | import java.sql.DriverManager
7 | import java.sql.DriverPropertyInfo
8 | import java.sql.SQLException
9 | import java.sql.SQLFeatureNotSupportedException
10 | import java.util.*
11 | import java.util.logging.Logger
12 |
13 | class LensesDriver : Driver, Logging, IWrapper {
14 |
15 | companion object : Logging {
16 | init {
17 | logger.debug("Registering LSQL JDBC Driver with DriverManager")
18 | DriverManager.registerDriver(LensesDriver())
19 | }
20 | }
21 |
22 | override fun getParentLogger(): Logger = throw SQLFeatureNotSupportedException()
23 |
24 | override fun getPropertyInfo(url: String?, info: Properties?): Array<DriverPropertyInfo> {
25 | return arrayOf(
26 | DriverPropertyInfo("user", null).apply {
27 | this.required = true
28 | this.description = "Username for credentials"
29 | },
30 | DriverPropertyInfo("password", null).apply {
31 | this.required = true
32 | this.description = "Password for credentials"
33 | },
34 | DriverPropertyInfo("weakssl", null).apply {
35 | this.required = false
36 | this.description = "Set to true if the driver should accept self signed SSL certificates"
37 | }
38 | )
39 | }
40 |
41 | internal fun parseUrl(url: String): Pair<String, Properties> {
42 | val props = Properties()
43 | val parts = url.split('?')
44 | if (parts.size == 2) {
45 | parts[1].split('&').forEach {
46 | val (key, value) = it.split('=')
47 | props[key] = value
48 | }
49 | }
50 | return Pair(parts[0], props)
51 | }
52 |
53 | override fun jdbcCompliant(): Boolean = false
54 |
55 | override fun acceptsURL(url: String?): Boolean {
56 | return url?.toLowerCase(Locale.ENGLISH)?.startsWith(Constants.JdbcPrefix) ?: false
57 | }
58 |
59 | override fun connect(url: String?, props: Properties?): Connection? {
60 | if (url == null) {
61 | throw SQLException("url cannot be null")
62 | } else {
63 | return if (!acceptsURL(url)) {
64 | null
65 | } else {
66 | val (baseUrl, urlProps) = parseUrl(url)
67 | props?.putAll(urlProps)
68 | LConnection(baseUrl, props ?: Properties())
69 | }
70 | }
71 | }
72 |
73 | override fun getMinorVersion(): Int = 0
74 | override fun getMajorVersion(): Int = 0
75 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/TypeInfo.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import org.apache.avro.Schema
4 |
5 | data class TypeInfo(
6 | val name: String,
7 | val dataType: Int, // the java.sql type int that matches this type
8 | val precision: Int,
9 | val signed: Boolean,
10 | val maxScale: Int,
11 | val minScale: Int,
12 | val literalEscape: Char?) {
13 |
14 | companion object {
15 |
16 | val Boolean = TypeInfo(Schema.Type.BOOLEAN.name, java.sql.Types.BOOLEAN, 0, false, 0, 0, null)
17 | val Bytes = TypeInfo(Schema.Type.BYTES.name, java.sql.Types.ARRAY, 0, false, 0, 0, null)
18 | val Decimal = TypeInfo("DECIMAL", java.sql.Types.DECIMAL, 32, false, 22, 0, null)
19 | val Double = TypeInfo(Schema.Type.DOUBLE.name, java.sql.Types.DOUBLE, 0, false, 0, 0, null)
20 | val Float = TypeInfo(Schema.Type.FLOAT.name, java.sql.Types.FLOAT, 0, false, 0, 0, null)
21 | val Date = TypeInfo("DATE", java.sql.Types.DATE, 0, false, 0, 0, '"')
22 | val Time = TypeInfo("TIME", java.sql.Types.TIME, 0, false, 0, 0, '"')
23 | val Timestamp = TypeInfo("TIMESTAMP", java.sql.Types.TIMESTAMP, 0, false, 0, 0, null)
24 | val Int = TypeInfo(Schema.Type.INT.name, java.sql.Types.INTEGER, 0, false, 0, 0, null)
25 | val Long = TypeInfo(Schema.Type.LONG.name, java.sql.Types.BIGINT, 0, false, 0, 0, null)
26 | val String = TypeInfo(Schema.Type.STRING.name, java.sql.Types.VARCHAR, 0, false, 0, 0, '"')
27 |
28 | val all = listOf(
29 | Boolean, Bytes, Decimal, Double, Float, Date, Time, Timestamp, Int, Long, String
30 | )
31 | }
32 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/Utils.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import java.util.regex.Pattern
4 |
5 | object Utils {
6 | fun like(str: String, expr: String): Boolean {
7 | var regex = buildExpr(expr)
8 | regex = regex.replace("_", ".").replace("%", ".*?")
9 | val p = Pattern.compile(regex, Pattern.CASE_INSENSITIVE or Pattern.DOTALL)
10 | return p.matcher(str).matches()
11 | }
12 |
13 | fun buildExpr(s: String?): String {
14 | if (s == null) {
15 | throw IllegalArgumentException("String cannot be null")
16 | }
17 |
18 | val len = s.length
19 | if (len == 0) {
20 | return ""
21 | }
22 |
23 | val sb = StringBuilder(len * 2)
24 | for (i in 0 until len) {
25 | val c = s[i]
26 | if ("[](){}.*+?$^|#\\".indexOf(c) != -1) {
27 | sb.append("\\")
28 | }
29 | sb.append(c)
30 | }
31 | return sb.toString()
32 | }
33 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/Versions.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.lenses.jdbc4.util.Logging
4 | import java.util.*
5 |
6 | object Versions : Logging {
7 |
8 | private val properties = Properties()
9 |
10 | init {
11 | this.javaClass.getResourceAsStream("/lsql.versions").use {
12 | properties.load(it)
13 | }
14 | }
15 |
16 | private fun loadOrDefault(key: String): Int {
17 | val version = properties.getProperty(key)
18 | return if (version == null) {
19 | logger.warn("Can not retrieve version information for this build.", null)
20 | -1
21 | } else {
22 | version.toInt()
23 | }
24 | }
25 |
26 | fun driverMajorVersion(): Int = loadOrDefault("driver.major")
27 | fun driverMinorVersion(): Int = loadOrDefault("driver.minor")
28 | fun databaseMajorVersion(): Int = loadOrDefault("lenses.major")
29 | fun databaseMinorVersion(): Int = loadOrDefault("lenses.minor")
30 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/avroExtensions.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import org.apache.avro.LogicalTypes
4 | import org.apache.avro.Schema
5 |
6 | fun Schema.isNullable(): Boolean {
7 | return this.type == Schema.Type.UNION &&
8 | this.types.firstOrNull { it -> it.type == Schema.Type.NULL } != null
9 | }
10 |
11 | fun Schema.isNumber(): Boolean {
12 | return when (this.type) {
13 | Schema.Type.FLOAT, Schema.Type.INT, Schema.Type.DOUBLE, Schema.Type.LONG -> true
14 | else -> false
15 | }
16 | }
17 |
18 | fun Schema.scale(): Int {
19 | val logicalType = this.logicalType
20 | return when (logicalType) {
21 | is LogicalTypes.Decimal -> logicalType.scale
22 | else -> 0
23 | }
24 | }
25 |
26 | fun Schema.fromUnion(): Schema {
27 | return when (this.type) {
28 | Schema.Type.UNION -> {
29 | val schemaTypes = this.types
30 | return when {
31 | schemaTypes.size == 1 -> types[0]
32 | schemaTypes.size == 2 -> schemaTypes.first { it -> it.type != Schema.Type.NULL }
33 | else -> throw IllegalArgumentException("Not a Union schema")
34 | }
35 | }
36 | else -> this
37 | }
38 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/client/AuthenticationException.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.client
2 |
3 | class AuthenticationException(msg: String) : Exception(msg)
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/client/Credentials.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.client
2 |
3 | data class Credentials(val user: String,
4 | val password: String)
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/client/JdbcRequestMessage.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.client
2 |
3 | data class JdbcRequestMessage(val sql: String,
4 | val token: String)
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/client/LensesClient.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.client
2 |
3 | import arrow.core.*
4 | import com.fasterxml.jackson.databind.JsonNode
5 | import com.fasterxml.jackson.databind.node.NullNode
6 | import com.fasterxml.jackson.databind.node.TextNode
7 | import io.ktor.client.HttpClient
8 | import io.ktor.client.features.compression.ContentEncoding
9 | import io.ktor.client.features.json.JacksonSerializer
10 | import io.ktor.client.features.json.JsonFeature
11 | import io.ktor.client.features.websocket.WebSockets
12 | import io.ktor.client.request.HttpRequestBuilder
13 | import io.ktor.client.request.accept
14 | import io.ktor.client.request.header
15 | import io.ktor.client.request.request
16 | import io.ktor.client.response.HttpResponse
17 | import io.ktor.client.response.readText
18 | import io.ktor.http.ContentType
19 | import io.ktor.http.HttpMethod
20 | import io.ktor.http.HttpStatusCode
21 | import io.ktor.http.contentType
22 | import io.ktor.util.KtorExperimentalAPI
23 | import io.lenses.jdbc4.JacksonSupport
24 | import io.lenses.jdbc4.normalizeRecord
25 | import io.lenses.jdbc4.resultset.RowResultSet
26 | import io.lenses.jdbc4.resultset.WebSocketResultSet
27 | import io.lenses.jdbc4.resultset.WebsocketConnection
28 | import io.lenses.jdbc4.resultset.emptyResultSet
29 | import io.lenses.jdbc4.row.PairRow
30 | import io.lenses.jdbc4.row.Row
31 | import io.lenses.jdbc4.util.Logging
32 | import kotlinx.coroutines.ObsoleteCoroutinesApi
33 | import org.apache.avro.Schema
34 | import org.glassfish.tyrus.client.ClientManager
35 | import org.glassfish.tyrus.client.ClientProperties
36 | import org.springframework.web.socket.*
37 | import org.springframework.web.socket.client.WebSocketClient
38 | import org.springframework.web.socket.client.standard.StandardWebSocketClient
39 | import java.net.URI
40 | import java.util.concurrent.LinkedBlockingQueue
41 |
42 | data class Token(val value: String)
43 |
44 | sealed class JdbcError {
45 | open val cause: Throwable? = null
46 |
47 | data class AuthenticationFailure(val message: String) : JdbcError()
48 | data class InitialError(val message: String) : JdbcError()
49 | object ExecutionError : JdbcError()
50 | object NoData : JdbcError()
51 | data class ParseError(override val cause: Throwable) : JdbcError()
52 | data class UnsupportedRowType(val type: String) : JdbcError()
53 | }
54 |
55 | val frameToRecord: (String, Schema) -> Either<JdbcError, Row> = { msg, schema ->
56 | Try { JacksonSupport.mapper.readTree(msg) }
57 | .toEither { JdbcError.ParseError(it) }
58 | .map { node ->
59 | val data = node["data"]
60 | val value = data["value"]
61 | val values = when (schema.type) {
62 | Schema.Type.RECORD -> when (value) {
63 | null -> normalizeRecord(schema, NullNode.instance)
64 | else -> normalizeRecord(schema, value)
65 | }
66 | else -> when (value) {
67 | null -> emptyList()
68 | else -> listOf("value" to value.toString())
69 | }
70 | }
71 | PairRow(values)
72 | }
73 | }
74 |
75 | class LensesClient(private val url: String,
76 | private val credentials: Credentials,
77 | private val weakSSL: Boolean) : AutoCloseable, Logging {
78 |
79 | companion object {
80 | const val LensesTokenHeader = "X-Kafka-Lenses-Token"
81 | }
82 |
83 | private fun parseSchema(node: JsonNode): Either<JdbcError, Schema> = Try {
84 | val json = when (val valueSchema = node["data"]["valueSchema"]) {
85 | is TextNode -> valueSchema.asText()
86 | is NullNode -> Schema.create(Schema.Type.NULL).toString(true)
87 | null -> Schema.create(Schema.Type.NULL).toString(true)
88 | else -> JacksonSupport.mapper.writeValueAsString(valueSchema)
89 | }
90 | Schema.Parser().parse(json)
91 | }.toEither { JdbcError.ParseError(it) }
92 |
93 | private val frameToRow: (String, Schema) -> Either<JdbcError, Row?> = { msg, schema ->
94 | Try { JacksonSupport.mapper.readTree(msg) }
95 | .toEither { JdbcError.ParseError(it) }
96 | .flatMap { node ->
97 | when (val type = node["type"].textValue()) {
98 | "RECORD" -> frameToRecord(msg, schema)
99 | "END" -> Right(null)
100 | else -> throw UnsupportedOperationException("Unsupported row type $type")
101 | }
102 | }
103 | }
104 |
105 | private val wssclient = ClientManager.createClient().apply {
106 | this.properties[ClientProperties.REDIRECT_ENABLED] = true
107 | }
108 |
109 | @UseExperimental(KtorExperimentalAPI::class)
110 | private val client = HttpClient {
111 | install(WebSockets)
112 | install(ContentEncoding) {
113 | gzip()
114 | identity()
115 | }
116 | install(JsonFeature) {
117 | serializer = JacksonSerializer()
118 | }
119 | }
120 |
121 | private var isClosed: Boolean = false
122 |
123 | override fun close() {
124 | client.close()
125 | isClosed = true
126 | }
127 |
128 | fun isClosed(): Boolean = isClosed
129 |
130 | // attempts to authenticate, and returns the auth token if successful
131 | private suspend fun authenticate(): Either<JdbcError, Token> {
132 |
133 | val endpoint = "$url/api/login"
134 | logger.debug("Authenticating at $endpoint")
135 |
136 | val resp = client.request(endpoint) {
137 | method = HttpMethod.Post
138 | contentType(ContentType.Application.Json)
139 | accept(ContentType.Text.Plain)
140 | body = credentials
141 | }
142 |
143 | return if (resp.status == HttpStatusCode.OK)
144 | Token(resp.readText()).right()
145 | else
146 | JdbcError.AuthenticationFailure(resp.readText()).left()
147 | }
148 |
149 | @ObsoleteCoroutinesApi
150 | suspend fun execute(sql: String): Either<JdbcError, RowResultSet> {
151 | val endpoint = "$url/api/ws/v3/jdbc/execute"
152 | return withAuthenticatedWebsocket(endpoint, sql).flatMap {
153 | // we always need the first frame to obtain the schema; if it is an END frame, we return an empty result set
154 |
155 | val msg = it.queue.take()
156 | val node = JacksonSupport.mapper.readTree(msg)
157 |
158 | when (node["type"].textValue()) {
159 | "END" -> emptyResultSet.right()
160 | "SCHEMA" -> parseSchema(node).map { schema ->
161 | WebSocketResultSet(null, schema, it, frameToRow)
162 | }
163 | else -> JdbcError.InitialError(node.toString()).left()
164 | }
165 | }
166 | }
167 |
168 | private suspend fun withAuthenticatedWebsocket(url: String, sql: String): Either<JdbcError, WebsocketConnection> {
169 | return authenticate().flatMap { token ->
170 | val uri = URI.create(url.replace("https://", "ws://").replace("http://", "ws://"))
171 | val headers = WebSocketHttpHeaders()
172 | headers.add(LensesTokenHeader, token.value)
173 | //Expected to read from env variables
174 | //val sslContextConfigurator = SslContextConfigurator()
175 | /* sslContextConfigurator.setTrustStoreFile("...");
176 | * sslContextConfigurator.setTrustStorePassword("...");
177 | * sslContextConfigurator.setTrustStoreType("...");
178 | * sslContextConfigurator.setKeyStoreFile("...");
179 | * sslContextConfigurator.setKeyStorePassword("...");
180 | * sslContextConfigurator.setKeyStoreType("...");
181 | */
182 | //val sslEngineConfigurator = SslEngineConfigurator(sslContextConfigurator, true,false, false)
183 |
184 | val clientManager:ClientManager = ClientManager.createClient()
185 | clientManager.properties[ClientProperties.REDIRECT_ENABLED] = true
186 | //clientManager.properties[ClientProperties.SSL_ENGINE_CONFIGURATOR] = sslEngineConfigurator
187 | val wsclient: WebSocketClient = StandardWebSocketClient(clientManager)
188 |
189 | val queue = LinkedBlockingQueue<String>(200)
190 | val jdbcRequest = JdbcRequestMessage(sql, token.value)
191 | val handler = object : WebSocketHandler {
192 | override fun handleTransportError(session: WebSocketSession,
193 | exception: Throwable) {
194 | logger.error("Websocket error", exception)
195 | }
196 |
197 | override fun afterConnectionClosed(session: WebSocketSession,
198 | closeStatus: CloseStatus) {
199 | logger.debug("Connection closed $closeStatus")
200 | }
201 |
202 | override fun handleMessage(session: WebSocketSession,
203 | message: WebSocketMessage<*>) {
204 | logger.debug("Handling message in thread ${Thread.currentThread().id}")
205 | when (message) {
206 | is TextMessage -> queue.put(message.payload)
207 | else -> {
208 | logger.error("Unsupported message type $message")
209 | throw java.lang.UnsupportedOperationException("Unsupported message type $message")
210 | }
211 | }
212 | }
213 |
214 | override fun afterConnectionEstablished(session: WebSocketSession) {
215 | logger.debug("Connection established. Sending the SQL to the server...")
216 | //send the SQL and the token
217 | val json = JacksonSupport.toJson(jdbcRequest)
218 | val message = TextMessage(json.toByteArray())
219 | session.sendMessage(message)
220 |
221 | }
222 |
223 | override fun supportsPartialMessages(): Boolean = false
224 | }
225 |
226 | logger.debug("Connecting to websocket at $uri")
227 | val sess = wsclient.doHandshake(handler, headers, uri).get()
228 |
229 | val conn = object : WebsocketConnection {
230 | override val queue = queue
231 | override fun close() {
232 | if(sess.isOpen) {
233 | try {
234 | sess.close()
235 | } catch (t: Throwable) {
236 |
237 | }
238 | }
239 | }
240 | override fun isClosed(): Boolean = !sess.isOpen
241 | }
242 | conn.right()
243 | }
244 | }
245 |
246 | private suspend fun <T> withAuthenticated(req: HttpRequestBuilder, f: (HttpResponse) -> T): Either<JdbcError, T> {
247 | return authenticate().flatMap { token ->
248 | req.header(LensesTokenHeader, token.value)
249 | val resp = client.request(req)
250 | if (resp.status == HttpStatusCode.OK) f(resp).right() else JdbcError.ExecutionError.left()
251 | }
252 | }
253 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/mappers/SelectFieldsMapper.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.mappers
2 |
3 | import io.lenses.jdbc4.Schemas
4 | import io.lenses.jdbc4.row.ListRow
5 | import io.lenses.jdbc4.row.Row
6 | import java.sql.DatabaseMetaData
7 | import java.sql.Types
8 |
9 | val SelectFieldsMapper: (Row) -> Row = { row ->
10 | val values = listOf(
11 | null,
12 | null,
13 | row.getString(2),
14 | row.getString(1),
15 | Types.OTHER, // todo AvroSchemas.sqlType(field.schema()),
16 | row.getString(3),
17 | 0, // todo
18 | 0,
19 | 0, // todo in lenses sql field.schema().scale(), // DECIMAL_DIGITS
20 | 10, // NUM_PREC_RADIX
21 | DatabaseMetaData.columnNullableUnknown, //if (field.schema().isNullable) DatabaseMetaData.columnNullable else DatabaseMetaData.columnNoNulls,
22 | row.getString(4), // REMARKS
23 | null, // COLUMN_DEF unused
24 | null, // SQL_DATA_TYPE unused
25 | null, // SQL_DATETIME_SUB unused
26 | 0, // CHAR_OCTET_LENGTH
27 | 0, // pos + 1, // todo update in samsql // ORDINAL_POSITION
28 | "NO", // todo update in samsql if (field.schema().isNullable) "YES" else "NO", // IS_NULLABLE
29 | null, // SCOPE_CATALOG
30 | null, // SCOPE_SCHEMA
31 | null, // SCOPE_TABLE
32 | null, // SOURCE_DATA_TYPE
33 | "NO", // IS_AUTOINCREMENT
34 | "" // IS_GENERATEDCOLUMN
35 | )
36 | assert(values.size == Schemas.Columns.fields.size) { "List has ${values.size} but should have ${Schemas.Columns.fields.size}" }
37 | ListRow(values)
38 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/mappers/ShowTableTypesMapper.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.mappers
2 |
3 | import io.lenses.jdbc4.row.ListRow
4 | import io.lenses.jdbc4.row.Row
5 |
6 | /**
7 | * The table type is:
8 | *
9 | * - TABLE_TYPE String {@code =>} table type. Typical types are "TABLE",
10 | * "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
11 | * "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
12 | *
13 | */
14 | object ShowTableTypesMapper : (Row) -> Row {
15 | override fun invoke(row: Row): Row = ListRow(listOf(row.getString(1)))
16 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/mappers/ShowTablesMapper.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.mappers
2 |
3 | import io.lenses.jdbc4.row.ListRow
4 | import io.lenses.jdbc4.row.Row
5 |
6 | /**
7 | * Each table description has the following columns:
8 | *
9 |  * - TABLE_CAT String {@code =>} table catalog (may be null)
10 |  * - TABLE_SCHEM String {@code =>} table schema (may be null)
11 |  * - TABLE_NAME String {@code =>} table name
12 |  * - TABLE_TYPE String {@code =>} table type. Typical types are "TABLE",
13 |  *   "VIEW", "SYSTEM TABLE", "GLOBAL TEMPORARY",
14 |  *   "LOCAL TEMPORARY", "ALIAS", "SYNONYM".
15 |  * - REMARKS String {@code =>} explanatory comment on the table
16 |  * - TYPE_CAT String {@code =>} the types catalog (may be null)
17 |  * - TYPE_SCHEM String {@code =>} the types schema (may be null)
18 |  * - TYPE_NAME String {@code =>} type name (may be null)
19 |  * - SELF_REFERENCING_COL_NAME String {@code =>} name of the designated
20 |  *   "identifier" column of a typed table (may be null)
21 |  * - REF_GENERATION String {@code =>} specifies how values in
22 |  *   SELF_REFERENCING_COL_NAME are created. Values are
23 |  *   "SYSTEM", "USER", "DERIVED". (may be null)
24 | *
25 | */
26 | object ShowTablesMapper : (Row) -> Row {
27 | override fun invoke(row: Row): Row = ListRow(
28 | listOf(
29 | null,
30 | null,
31 | row.getString(1),
32 | row.getString(2),
33 | null,
34 | null,
35 | null,
36 | null,
37 | null,
38 | null
39 | )
40 | )
41 | }
42 |
43 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/normalizeRecord.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import com.fasterxml.jackson.databind.JsonNode
4 | import com.fasterxml.jackson.databind.node.NullNode
5 | import org.apache.avro.Schema
6 |
7 | fun normalizeRecord(schema: Schema, node: JsonNode, prefix: String = ""): List<Pair<String, Any?>> {
8 | // todo expand this algo to cover non-record types
9 | require(schema.type == Schema.Type.RECORD) {
11 | "Unsupported type $schema"
12 | }
13 | return schema.fields.flatMap { field ->
14 | val childNode = node[field.name()]
15 | when {
16 | childNode == null && field.schema().type == Schema.Type.RECORD ->
17 | normalizeRecord(field.schema(), NullNode.instance, prefix + field.name() + ".")
18 | childNode == null -> listOf((prefix + field.name()) to null)
19 | childNode.isArray -> listOf((prefix + field.name()) to childNode.elements().asSequence().map {
20 | normalizeRecord(field.schema(), it, prefix)
21 | })
22 | childNode.isObject -> normalizeRecord(field.schema(), childNode, prefix + field.name() + ".")
23 | else -> {
24 | val value = valueFromNode(childNode)
25 | listOf((prefix + field.name()) to value)
26 | }
27 | }
28 | }
29 | }
30 |
31 | fun valueFromNode(node: JsonNode): Any? = when {
32 | node.isBigDecimal -> node.decimalValue()
33 | node.isTextual -> node.textValue()
34 | node.isBigInteger -> node.bigIntegerValue()
35 | node.isBinary -> node.binaryValue()
36 | node.isBoolean -> node.booleanValue()
37 | node.isDouble -> node.doubleValue()
38 | node.isFloat -> node.floatValue()
39 | node.isInt -> node.intValue()
40 | node.isLong -> node.longValue()
41 | node.isNull -> null
42 | node.isShort -> node.shortValue()
43 | else -> throw UnsupportedOperationException()
44 | }
--------------------------------------------------------------------------------
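As a quick illustration of the flattening above, the following sketch normalizes a nested record into dotted-path/value pairs. The schema and JSON payload are invented purely for this example.

    import com.fasterxml.jackson.databind.ObjectMapper
    import io.lenses.jdbc4.normalizeRecord
    import org.apache.avro.SchemaBuilder

    fun main() {
      // hypothetical schema: a record with one nested record field
      val customer = SchemaBuilder.record("customer").fields()
          .requiredString("name")
          .endRecord()
      val payment = SchemaBuilder.record("payment").fields()
          .requiredString("currency")
          .name("customer").type(customer).noDefault()
          .endRecord()

      val node = ObjectMapper().readTree("""{"currency":"EUR","customer":{"name":"bob"}}""")

      // prints [(currency, EUR), (customer.name, bob)]
      println(normalizeRecord(payment, node))
    }

Nested records contribute their field name as a prefix, so the inner field surfaces as customer.name.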
/src/main/kotlin/io/lenses/jdbc4/resultset/AbstractResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.IWrapper
4 | import java.sql.ResultSet
5 | import java.sql.SQLFeatureNotSupportedException
6 | import java.sql.SQLWarning
7 |
8 | interface AbstractResultSet : ResultSet, IWrapper {
9 |
10 | override fun getCursorName(): String = throw SQLFeatureNotSupportedException()
11 |
12 | override fun clearWarnings() {}
13 | override fun getWarnings(): SQLWarning? = null
14 |
15 | override fun isWrapperFor(iface: Class<*>?): Boolean = _isWrapperFor(iface)
16 |   override fun <T> unwrap(iface: Class<T>): T = _unwrap(iface)
17 |
18 | }
19 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/AvroSchemaResultSetMetaData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.AvroSchemas
4 | import io.lenses.jdbc4.IWrapper
5 | import org.apache.avro.LogicalTypes
6 | import org.apache.avro.Schema
7 | import java.sql.ResultSetMetaData
8 | import java.sql.SQLException
9 |
10 | class AvroSchemaResultSetMetaData(private val schema: Schema) : ResultSetMetaData, io.lenses.jdbc4.IWrapper {
11 |
12 | override fun getTableName(column: Int): String = schema.name
13 |
14 | override fun isNullable(column: Int): Int {
15 | return when (schemaForIndex(column).isNullable) {
16 | true -> ResultSetMetaData.columnNullable
17 | false -> ResultSetMetaData.columnNullableUnknown
18 | }
19 | }
20 |
21 | override fun isWrapperFor(iface: Class<*>?): Boolean = _isWrapperFor(iface)
22 |   override fun <T> unwrap(iface: Class<T>): T = _unwrap(iface)
23 | override fun isDefinitelyWritable(column: Int): Boolean = false
24 |
25 | override fun isSearchable(column: Int): Boolean = true
26 |
27 | override fun getPrecision(column: Int): Int {
28 | val schema = schemaForIndex(column)
29 | return when (typeForIndex(column)) {
30 | Schema.Type.BYTES ->
31 | when (schema.logicalType) {
32 | is LogicalTypes.Decimal -> (schema.logicalType as LogicalTypes.Decimal).precision
33 | else -> 0
34 | }
35 | Schema.Type.FIXED -> schema.fixedSize
36 | Schema.Type.STRING -> Int.MAX_VALUE
37 | else -> 0
38 | }
39 | }
40 |
41 | override fun isCaseSensitive(column: Int): Boolean = true
42 |
43 | override fun getScale(column: Int): Int {
44 | return when (typeForIndex(column)) {
45 | Schema.Type.BYTES -> {
46 | when (val logicalType = schemaForIndex(column).logicalType) {
47 | is LogicalTypes.Decimal -> logicalType.scale
48 | else -> 0
49 | }
50 | }
51 | else -> 0
52 | }
53 | }
54 |
55 |   // the JDBC spec requires "" when the schema name is not applicable
56 | override fun getSchemaName(column: Int): String = ""
57 |
58 |   // the JDBC spec requires "" when the catalog name is not applicable
59 | override fun getCatalogName(column: Int): String = ""
60 |
61 | override fun getColumnClassName(column: Int): String {
62 | val type = typeForIndex(column)
63 | return io.lenses.jdbc4.AvroSchemas.jvmClassName(type)
64 | }
65 |
66 | override fun getColumnType(column: Int): Int {
67 | val schema = schemaForIndex(column)
68 | return AvroSchemas.sqlType(schema)
69 | }
70 |
71 | override fun isCurrency(column: Int): Boolean = false
72 |
73 | override fun getColumnName(column: Int): String = getColumnLabel(column)
74 | override fun getColumnLabel(column: Int): String {
75 | return when (schema.type) {
76 | Schema.Type.RECORD -> schema.fields[column - 1].name()
77 | else -> "unnamed"
78 | }
79 | }
80 |
81 | override fun isWritable(column: Int): Boolean = false
82 |
83 | override fun isReadOnly(column: Int): Boolean = true
84 |
85 | override fun isSigned(column: Int): Boolean {
86 | val type = typeForIndex(column)
87 | val schema = schemaForIndex(column)
88 | return when (type) {
89 | Schema.Type.BYTES ->
90 | when (schema.logicalType) {
91 | is LogicalTypes.Decimal -> true
92 | else -> false
93 | }
94 | Schema.Type.DOUBLE -> true
95 | Schema.Type.FLOAT -> true
96 | Schema.Type.INT ->
97 | when (schema.logicalType) {
98 | is LogicalTypes.TimeMillis -> false
99 | is LogicalTypes.Date -> false
100 | else -> true
101 | }
102 | Schema.Type.LONG ->
103 | when (schema.logicalType) {
104 | is LogicalTypes.TimestampMillis -> false
105 | is LogicalTypes.TimestampMicros -> false
106 | is LogicalTypes.TimeMicros -> false
107 | else -> true
108 | }
109 | else -> false
110 | }
111 | }
112 |
113 | override fun getColumnTypeName(column: Int): String = typeForIndex(column).name
114 |
115 | private fun schemaForIndex(index: Int): Schema {
116 | return when (schema.type) {
117 | Schema.Type.RECORD -> {
118 | if (index < 1 || index > schema.fields.size)
119 | throw IndexOutOfBoundsException("Index $index is out of bounds; note: JDBC drivers are 1-indexed")
120 | schema.fields[index - 1].schema()
121 | }
122 | else -> {
123 | if (index != 1)
124 | throw IndexOutOfBoundsException("Index $index is out of bounds; note: JDBC drivers are 1-indexed")
125 | schema
126 | }
127 | }
128 | }
129 |
130 | private fun typeForIndex(index: Int): Schema.Type {
131 | return when (schema.type) {
132 | Schema.Type.RECORD -> {
133 | if (index < 1 || index > schema.fields.size)
134 | throw IndexOutOfBoundsException("Index $index is out of bounds; note: JDBC drivers are 1-indexed")
135 | schema.fields[index - 1].schema().type
136 | }
137 | else -> {
138 | if (index != 1)
139 | throw IndexOutOfBoundsException("Index $index is out of bounds; note: JDBC drivers are 1-indexed")
140 | schema.type
141 | }
142 | }
143 | }
144 |
145 | override fun isAutoIncrement(column: Int): Boolean = false
146 |
147 | override fun getColumnDisplaySize(column: Int): Int = 0
148 |
149 | override fun getColumnCount(): Int {
150 | // can be a record or a single field
151 | return when (schema.type) {
152 | Schema.Type.RECORD -> schema.fields.size
153 | else -> 1
154 | }
155 | }
156 |
157 | // returns the index for a given column label
158 | // 1-indexed
159 | internal fun indexForLabel(label: String): Int {
160 | val index = schema.fields.indexOfFirst { it.name() == label }
161 | if (index < 0 || index > schema.fields.size - 1)
162 | throw SQLException("Unknown column $label")
163 | return index + 1
164 | }
165 |
166 | // returns the field for a given column label
167 | internal fun fieldForLabel(label: String): Schema.Field {
168 | return schema.fields.find { it.name() == label } ?: throw SQLException("Unknown column $label")
169 | }
170 | }
171 |
172 | fun ResultSetMetaData.indexForLabel(label: String): Int = when (this) {
173 | is AvroSchemaResultSetMetaData -> this.indexForLabel(label)
174 | else -> throw UnsupportedOperationException()
175 | }
--------------------------------------------------------------------------------
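A minimal sketch of how this metadata behaves for a record schema; the schema below is invented for illustration, and all column access is 1-indexed as JDBC requires.

    import io.lenses.jdbc4.resultset.AvroSchemaResultSetMetaData
    import org.apache.avro.SchemaBuilder

    fun main() {
      // hypothetical two-column record schema
      val schema = SchemaBuilder.record("trade").fields()
          .requiredString("symbol")
          .requiredDouble("price")
          .endRecord()

      val meta = AvroSchemaResultSetMetaData(schema)
      println(meta.getColumnCount())     // 2
      println(meta.getColumnLabel(1))    // symbol
      println(meta.getColumnTypeName(2)) // DOUBLE
    }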
/src/main/kotlin/io/lenses/jdbc4/resultset/EmptyResultSetMetaData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.IWrapper
4 | import java.sql.ResultSetMetaData
5 |
6 | object EmptyResultSetMetaData : ResultSetMetaData, io.lenses.jdbc4.IWrapper {
7 | override fun isNullable(column: Int): Int = ResultSetMetaData.columnNoNulls
8 |   override fun <T> unwrap(iface: Class<T>): T = _unwrap(iface)
9 | override fun isDefinitelyWritable(column: Int): Boolean = false
10 | override fun isSearchable(column: Int): Boolean = false
11 | override fun getPrecision(column: Int): Int = 0
12 | override fun isCaseSensitive(column: Int): Boolean = false
13 | override fun getScale(column: Int): Int = 0
14 | override fun getSchemaName(column: Int): String? = null
15 | override fun getColumnClassName(column: Int): String? = null
16 | override fun getCatalogName(column: Int): String? = null
17 | override fun isWrapperFor(iface: Class<*>?): Boolean = false
18 | override fun getColumnType(column: Int): Int = 0
19 | override fun isCurrency(column: Int): Boolean = false
20 | override fun getColumnLabel(column: Int): String? = null
21 | override fun isWritable(column: Int): Boolean = false
22 | override fun isReadOnly(column: Int): Boolean = false
23 | override fun isSigned(column: Int): Boolean = false
24 | override fun getColumnTypeName(column: Int): String? = null
25 | override fun getColumnName(column: Int): String? = null
26 | override fun isAutoIncrement(column: Int): Boolean = false
27 | override fun getColumnDisplaySize(column: Int): Int = 0
28 | override fun getColumnCount(): Int = 0
29 | override fun getTableName(column: Int): String? = null
30 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/EmptyRow.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.row.Row
4 | import java.io.Reader
5 | import java.math.BigDecimal
6 | import java.sql.Date
7 | import java.sql.RowId
8 | import java.sql.Time
9 | import java.sql.Timestamp
10 | import java.util.*
11 |
12 | val emptyResultSet: RowResultSet = ListResultSet(null, null, emptyList())
13 |
14 | object EmptyRow : Row {
15 |
16 | override fun getObject(index: Int): Any? {
17 | TODO()
18 | }
19 |
20 | override fun getRowId(index: Int): RowId {
21 | TODO()
22 | }
23 |
24 | override fun charStream(index: Int): Reader? {
25 | TODO()
26 | }
27 |
28 | override fun getBigDecimal(index: Int, scale: Int): BigDecimal? {
29 | TODO()
30 | }
31 |
32 | override fun getBigDecimal(index: Int): BigDecimal? {
33 | TODO()
34 | }
35 |
36 | override fun getBoolean(index: Int): Boolean {
37 | TODO()
38 | }
39 |
40 | override fun getByte(index: Int): Byte {
41 | TODO()
42 | }
43 |
44 | override fun getBytes(index: Int): ByteArray? {
45 | TODO()
46 | }
47 |
48 | override fun getDate(index: Int): Date? {
49 | TODO()
50 | }
51 |
52 | override fun getDate(index: Int, cal: Calendar?): Date? {
53 | TODO()
54 | }
55 |
56 | override fun getFloat(index: Int): Float {
57 | TODO()
58 | }
59 |
60 | override fun getInt(index: Int): Int {
61 | TODO()
62 | }
63 |
64 | override fun getTime(index: Int): Time? = null
65 | override fun getTime(index: Int, cal: Calendar?): Time? = null
66 | override fun getLong(index: Int): Long {
67 | TODO()
68 | }
69 |
70 | override fun getTimestamp(index: Int): Timestamp? {
71 | TODO()
72 | }
73 |
74 | override fun getTimestamp(index: Int, cal: Calendar?): Timestamp? {
75 | TODO()
76 | }
77 |
78 | override fun getDouble(index: Int): Double {
79 | TODO()
80 | }
81 |
82 | override fun getString(index: Int): String? {
83 | TODO()
84 | }
85 |
86 | override fun getShort(index: Int): Short {
87 | TODO()
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/ListResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.row.Row
4 | import org.apache.avro.Schema
5 | import java.sql.ResultSet
6 | import java.sql.ResultSetMetaData
7 | import java.sql.SQLException
8 | import java.sql.SQLFeatureNotSupportedException
9 | import java.sql.Statement
10 |
11 | class ListResultSet(val stmt: Statement?,
12 | private val schema: Schema?,
13 |                     private val rows: List<Row>) : RowResultSet(),
14 | ImmutableResultSet,
15 | UnsupportedTypesResultSet {
16 |
17 | companion object {
18 |     fun emptyOf(schema: Schema) = ListResultSet(null, schema, emptyList())
19 | }
20 |
21 | private var rowNumber: Int = -1
22 | private var row: Row? = null
23 |
24 | override fun getRow(): Int = rowNumber
25 | override fun currentRow(): Row = row ?: throw SQLException("No current row")
26 |
27 | override fun meta(): ResultSetMetaData = if (schema == null) EmptyResultSetMetaData else AvroSchemaResultSetMetaData(schema)
28 |
29 | private fun fetchRow(): Boolean {
30 | return if (0 <= rowNumber && rowNumber < rows.size) {
31 | row = rows[rowNumber]
32 | true
33 | } else {
34 | row = null
35 | false
36 | }
37 | }
38 |
39 | override fun next(): Boolean {
40 | rowNumber++
41 | return fetchRow()
42 | }
43 |
44 | override fun isClosed(): Boolean = true
45 | override fun close() {}
46 |
47 | override fun getStatement(): Statement? = stmt
48 | override fun getMetaData(): ResultSetMetaData = meta()
49 |
50 | override fun beforeFirst() {
51 | rowNumber = -1
52 | }
53 |
54 | override fun isFirst(): Boolean = rowNumber == 0
55 |
56 | override fun isLast(): Boolean = rowNumber == rows.size - 1
57 |
58 | override fun last(): Boolean {
59 | rowNumber = rows.size - 1
60 | return fetchRow()
61 | }
62 |
63 | override fun isAfterLast(): Boolean = rowNumber >= rows.size
64 |
65 | override fun relative(rows: Int): Boolean {
66 | rowNumber += rows
67 | return fetchRow()
68 | }
69 |
70 | override fun absolute(row: Int): Boolean {
71 | rowNumber = if (row < 0)
72 | rows.size + row
73 | else
74 | row - 1
75 | return fetchRow()
76 | }
77 |
78 | override fun first(): Boolean {
79 | rowNumber = 0
80 | return fetchRow()
81 | }
82 |
83 | override fun getType(): Int {
84 | return ResultSet.TYPE_SCROLL_INSENSITIVE
85 | }
86 |
87 | override fun setFetchSize(rows: Int) {}
88 |
89 | override fun afterLast() {
90 | rowNumber = rows.size
91 | }
92 |
93 | override fun previous(): Boolean {
94 | rowNumber--
95 | return rowNumber >= 0 && rowNumber < rows.size
96 | }
97 |
98 | override fun setFetchDirection(direction: Int) = throw SQLFeatureNotSupportedException()
99 |
100 | override fun getFetchSize(): Int = -1
101 |
102 | override fun isBeforeFirst(): Boolean {
103 | return rowNumber < 0
104 | }
105 |
106 | override fun getFetchDirection(): Int = ResultSet.FETCH_FORWARD
107 | }
--------------------------------------------------------------------------------
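Because ListResultSet is backed by an in-memory list it is scroll-insensitive, so the cursor can be moved freely. A small sketch (rows invented for illustration; no schema is supplied, so only index-based getters are usable):

    import io.lenses.jdbc4.resultset.ListResultSet
    import io.lenses.jdbc4.row.ListRow

    fun main() {
      val rs = ListResultSet(null, null, listOf(
          ListRow(listOf("alice", 31)),
          ListRow(listOf("bob", 52))
      ))

      while (rs.next()) {
        println("${rs.getString(1)} is ${rs.getInt(2)}")
      }

      rs.first()               // jump back to the first row
      println(rs.getString(1)) // alice
    }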
/src/main/kotlin/io/lenses/jdbc4/resultset/PullForwardOnlyResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import java.sql.ResultSet
4 | import java.sql.SQLException
5 | import java.sql.SQLFeatureNotSupportedException
6 |
7 | /**
8 |  * Partial implementation of [ResultSet] for result sets that are pull-based
9 |  * and forward-only. The cursor cannot be manipulated by the user of this
10 |  * result set other than by calling next to advance through the stream.
11 | */
12 | interface PullForwardOnlyResultSet : ResultSet {
13 |
14 | override fun absolute(row: Int): Boolean = throw SQLFeatureNotSupportedException()
15 | override fun relative(rows: Int): Boolean = throw SQLFeatureNotSupportedException()
16 | override fun previous() = throw SQLFeatureNotSupportedException()
17 | override fun beforeFirst() = throw SQLFeatureNotSupportedException()
18 | override fun afterLast() = throw SQLFeatureNotSupportedException()
19 | override fun first() = throw SQLFeatureNotSupportedException()
20 | override fun last() = throw SQLFeatureNotSupportedException()
21 |
22 | override fun getFetchSize(): Int = -1
23 | override fun setFetchSize(rows: Int) {} // no op since this resultset is streaming
24 |
25 | override fun getFetchDirection(): Int = ResultSet.FETCH_FORWARD
26 | override fun setFetchDirection(direction: Int): Unit = when (direction) {
27 | ResultSet.FETCH_FORWARD -> Unit
28 | else -> throw SQLException("Unsupported fetch direction $direction")
29 | }
30 |
31 | // streaming result sets can only go forwards
32 | override fun getType(): Int = ResultSet.TYPE_FORWARD_ONLY
33 |
34 | override fun isLast(): Boolean = false
35 | override fun isFirst(): Boolean = row == 1
36 | override fun isBeforeFirst(): Boolean = row < 1
37 | override fun isAfterLast(): Boolean = false
38 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/ResultSetIterator.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import java.sql.ResultSet
4 |
5 | fun resultSetIterator(rs: ResultSet): Iterator<ResultSet> {
6 |   return object : Iterator<ResultSet> {
7 | override fun next(): ResultSet = rs
8 | override fun hasNext(): Boolean = rs.next()
9 | }
10 | }
11 |
12 | fun resultSetList(rs: ResultSet): List<List<Any?>> {
13 |   val results = mutableListOf<List<Any?>>()
14 | while (rs.next()) {
15 | val row = (1..rs.metaData.columnCount).map { rs.getObject(it) }
16 | results.add(row.toList())
17 | }
18 | return results.toList()
19 | }
20 |
21 | fun ResultSet.toList() = resultSetList(this)
--------------------------------------------------------------------------------
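The helpers above drain an arbitrary ResultSet into plain lists. Combined with the in-memory result set they can be exercised without a server; the schema and rows below are invented for illustration.

    import io.lenses.jdbc4.resultset.ListResultSet
    import io.lenses.jdbc4.resultset.toList
    import io.lenses.jdbc4.row.ListRow
    import org.apache.avro.SchemaBuilder

    fun main() {
      // hypothetical schema; the column count comes from the result set metadata
      val schema = SchemaBuilder.record("user").fields()
          .requiredString("name")
          .requiredInt("age")
          .endRecord()

      val rs = ListResultSet(null, schema, listOf(
          ListRow(listOf("alice", 31)),
          ListRow(listOf("bob", 52))
      ))

      // one inner list per row: [[alice, 31], [bob, 52]]
      println(rs.toList())
    }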
/src/main/kotlin/io/lenses/jdbc4/resultset/RowResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.row.Row
4 | import java.io.Reader
5 | import java.math.BigDecimal
6 | import java.sql.Date
7 | import java.sql.ResultSet
8 | import java.sql.ResultSetMetaData
9 | import java.sql.Time
10 | import java.sql.Timestamp
11 | import java.util.*
12 |
13 | /**
14 |  * A base implementation of [ResultSet] that models returned data rows as
15 |  * instances of [Row]. It provides implementations of the ResultSet getXXX
16 |  * conversion functions by delegating to the current row.
17 | */
18 | abstract class RowResultSet : AbstractResultSet {
19 |
20 | // returns the row which the cursor is currently pointing to
21 | abstract fun currentRow(): Row
22 |
23 | // each time we invoke getXXX, this is the value that was returned last
24 | private var lastValue: Any? = null
25 |
26 | // returns the meta data for the current row
27 | protected abstract fun meta(): ResultSetMetaData
28 |
29 | override fun findColumn(label: String): Int = meta().indexForLabel(label)
30 |
31 | // updates the last returned value each time getXXX is invoked
32 |   private fun <T> trackObject(t: T): T {
33 | this.lastValue = t
34 | return t
35 | }
36 |
37 | override fun wasNull(): Boolean = lastValue == null
38 |
39 | override fun getDate(index: Int): Date? = trackObject(currentRow().getDate(index))
40 | override fun getDate(label: String): Date? = trackObject(getDate(meta().indexForLabel(label)))
41 | override fun getDate(index: Int, cal: Calendar?): Date? = trackObject(currentRow().getDate(index, cal))
42 | override fun getDate(label: String, cal: Calendar?): Date? =
43 | trackObject(currentRow().getDate(meta().indexForLabel(label), cal))
44 |
45 | override fun getBoolean(index: Int): Boolean = trackObject(currentRow().getBoolean(index))
46 | override fun getBoolean(label: String): Boolean = trackObject(currentRow().getBoolean(meta().indexForLabel(label)))
47 | override fun getBigDecimal(index: Int, scale: Int): BigDecimal? =
48 | trackObject(currentRow().getBigDecimal(index, scale))
49 |
50 | override fun getBigDecimal(label: String, scale: Int): BigDecimal? =
51 | trackObject(currentRow().getBigDecimal(meta().indexForLabel(label), scale))
52 |
53 | override fun getBigDecimal(index: Int): BigDecimal? = trackObject(currentRow().getBigDecimal(index))
54 | override fun getBigDecimal(label: String): BigDecimal? =
55 | trackObject(currentRow().getBigDecimal(meta().indexForLabel(label)))
56 |
57 | override fun getTime(index: Int): Time? = trackObject(currentRow().getTime(index))
58 | override fun getTime(label: String): Time? = trackObject(currentRow().getTime(meta().indexForLabel(label)))
59 | override fun getTime(index: Int, cal: Calendar): Time? = trackObject(currentRow().getTime(index, cal))
60 | override fun getTime(label: String, cal: Calendar): Time? =
61 | trackObject(currentRow().getTime(meta().indexForLabel(label), cal))
62 |
63 | override fun getByte(index: Int): Byte = trackObject(currentRow().getByte(index))
64 | override fun getByte(label: String): Byte = trackObject(currentRow().getByte(meta().indexForLabel(label)))
65 | override fun getString(index: Int): String? = trackObject(currentRow().getString(index))
66 | override fun getString(label: String): String? = trackObject(currentRow().getString(meta().indexForLabel(label)))
67 | override fun getObject(index: Int): Any? = trackObject(currentRow().getObject(index))
68 | override fun getObject(label: String): Any? = trackObject(currentRow().getObject(meta().indexForLabel(label)))
69 | override fun getLong(index: Int): Long = trackObject(currentRow().getLong(index))
70 | override fun getLong(label: String): Long = trackObject(currentRow().getLong(meta().indexForLabel(label)))
71 | override fun getFloat(index: Int): Float = trackObject(currentRow().getFloat(index))
72 | override fun getFloat(label: String): Float = trackObject(currentRow().getFloat(meta().indexForLabel(label)))
73 | override fun getInt(index: Int): Int = trackObject(currentRow().getInt(index))
74 | override fun getInt(label: String): Int = trackObject(currentRow().getInt(meta().indexForLabel(label)))
75 | override fun getShort(index: Int): Short = trackObject(currentRow().getShort(index))
76 | override fun getShort(label: String): Short = trackObject(currentRow().getShort(meta().indexForLabel(label)))
77 | override fun getTimestamp(index: Int): Timestamp? = trackObject(currentRow().getTimestamp(index))
78 | override fun getTimestamp(label: String): Timestamp? =
79 | trackObject(currentRow().getTimestamp(meta().indexForLabel(label)))
80 |
81 | override fun getTimestamp(index: Int, cal: Calendar): Timestamp? = trackObject(currentRow().getTimestamp(index, cal))
82 | override fun getTimestamp(label: String, cal: Calendar): Timestamp? =
83 | trackObject(currentRow().getTimestamp(meta().indexForLabel(label), cal))
84 |
85 | override fun getBytes(index: Int): ByteArray? = trackObject(currentRow().getBytes(index))
86 | override fun getBytes(label: String): ByteArray? = trackObject(currentRow().getBytes(meta().indexForLabel(label)))
87 | override fun getDouble(index: Int): Double = trackObject(currentRow().getDouble(index))
88 | override fun getDouble(label: String): Double = trackObject(currentRow().getDouble(meta().indexForLabel(label)))
89 | override fun getNString(index: Int): String? = trackObject(currentRow().getString(index))
90 | override fun getNString(label: String): String? = trackObject(currentRow().getString(meta().indexForLabel(label)))
91 | override fun getCharacterStream(index: Int): Reader? = currentRow().charStream(index)
92 | override fun getCharacterStream(label: String): Reader? = currentRow().charStream(meta().indexForLabel(label))
93 | }
--------------------------------------------------------------------------------
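The wasNull contract is implemented by remembering the last value handed out by any getXXX call. A short sketch with invented data:

    import io.lenses.jdbc4.resultset.ListResultSet
    import io.lenses.jdbc4.row.ListRow

    fun main() {
      val rs = ListResultSet(null, null, listOf(ListRow(listOf("alice", null))))
      rs.next()

      rs.getString(1)
      println(rs.wasNull()) // false: the last value returned was "alice"

      rs.getString(2)
      println(rs.wasNull()) // true: the last value returned was null
    }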
/src/main/kotlin/io/lenses/jdbc4/resultset/UnsupportedTypesResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import java.io.InputStream
4 | import java.io.Reader
5 | import java.net.URL
6 | import java.sql.Blob
7 | import java.sql.Clob
8 | import java.sql.NClob
9 | import java.sql.Ref
10 | import java.sql.ResultSet
11 | import java.sql.RowId
12 | import java.sql.SQLFeatureNotSupportedException
13 | import java.sql.SQLXML
14 |
15 | interface UnsupportedTypesResultSet : ResultSet {
16 | override fun getNClob(index: Int): NClob = throw SQLFeatureNotSupportedException()
17 | override fun getNClob(label: String?): NClob = throw SQLFeatureNotSupportedException()
18 | override fun getBinaryStream(index: Int): InputStream? = throw SQLFeatureNotSupportedException()
19 | override fun getBinaryStream(label: String): InputStream? = throw SQLFeatureNotSupportedException()
20 | override fun getBlob(index: Int): Blob? = throw SQLFeatureNotSupportedException()
21 | override fun getBlob(label: String): Blob? = throw SQLFeatureNotSupportedException()
22 | override fun getUnicodeStream(index: Int): InputStream = throw SQLFeatureNotSupportedException()
23 | override fun getUnicodeStream(label: String?): InputStream = throw SQLFeatureNotSupportedException()
24 | override fun getNCharacterStream(index: Int): Reader = throw SQLFeatureNotSupportedException()
25 | override fun getNCharacterStream(label: String?): Reader = throw SQLFeatureNotSupportedException()
26 | override fun getAsciiStream(index: Int): InputStream = throw SQLFeatureNotSupportedException()
27 | override fun getAsciiStream(label: String?): InputStream = throw SQLFeatureNotSupportedException()
28 | override fun getSQLXML(index: Int): SQLXML = throw SQLFeatureNotSupportedException()
29 | override fun getSQLXML(label: String?): SQLXML = throw SQLFeatureNotSupportedException()
30 | override fun getURL(index: Int): URL = throw SQLFeatureNotSupportedException()
31 | override fun getURL(label: String?): URL = throw SQLFeatureNotSupportedException()
32 |   override fun getObject(index: Int, map: MutableMap<String, Class<*>>?): Any = throw SQLFeatureNotSupportedException()
33 |   override fun getObject(label: String?, map: MutableMap<String, Class<*>>?): Any = throw SQLFeatureNotSupportedException()
34 |   override fun <T> getObject(index: Int, type: Class<T>?): T = throw SQLFeatureNotSupportedException()
35 |   override fun <T> getObject(label: String?, type: Class<T>?): T = throw SQLFeatureNotSupportedException()
36 | override fun getClob(index: Int): Clob = throw SQLFeatureNotSupportedException()
37 | override fun getClob(label: String?): Clob = throw SQLFeatureNotSupportedException()
38 | override fun getArray(index: Int): java.sql.Array = throw SQLFeatureNotSupportedException()
39 | override fun getArray(label: String?): java.sql.Array = throw SQLFeatureNotSupportedException()
40 | override fun getRef(index: Int): Ref = throw SQLFeatureNotSupportedException()
41 | override fun getRef(label: String?): Ref = throw SQLFeatureNotSupportedException()
42 |
43 | override fun getRowId(index: Int): RowId = throw SQLFeatureNotSupportedException()
44 | override fun getRowId(label: String?): RowId = throw SQLFeatureNotSupportedException()
45 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/WebSocketResultSet.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import arrow.core.Either
4 | import arrow.core.getOrHandle
5 | import io.lenses.jdbc4.client.JdbcError
6 | import io.lenses.jdbc4.row.Row
7 | import io.lenses.jdbc4.util.Logging
8 | import org.apache.avro.Schema
9 | import java.sql.ResultSet
10 | import java.sql.ResultSetMetaData
11 | import java.sql.SQLException
12 | import java.sql.Statement
13 | import java.util.concurrent.BlockingQueue
14 |
15 | interface WebsocketConnection {
16 |   val queue: BlockingQueue<String>
17 | fun close()
18 | fun isClosed(): Boolean
19 | }
20 |
21 | abstract class StreamingRowResultSet : RowResultSet(),
22 | PullForwardOnlyResultSet,
23 | ImmutableResultSet,
24 | UnsupportedTypesResultSet,
25 | Logging
26 |
27 | /**
28 | * An implementation of [ResultSet] that retrieves records from a websocket via a queue.
29 | */
30 | class WebSocketResultSet(private val stmt: Statement?,
31 | private val schema: Schema, // the schema for the records that will follow
32 | private val conn: WebsocketConnection,
33 |                          private val converter: (String, Schema) -> Either<JdbcError, Row?>)
34 | : StreamingRowResultSet() {
35 |
36 | private var rowNumber: Int = 0
37 | private var row: Row? = null
38 | private var completed = false
39 |
40 | override fun next(): Boolean {
41 | return if (completed) false else {
42 | when (val msg = conn.queue.take()) {
43 | null -> {
44 | row = null
45 | completed = true
46 | conn.close()
47 | false
48 | }
49 | else -> {
50 | rowNumber++
51 | row = converter(msg, schema).getOrHandle { throw SQLException(it.cause) }
52 | row != null
53 | }
54 | }
55 | }
56 | }
57 |
58 | override fun isClosed(): Boolean = conn.isClosed()
59 | override fun close() {
60 | conn.close()
61 | }
62 |
63 | override fun getRow(): Int = rowNumber
64 | override fun currentRow(): Row = row!!
65 |
66 | override fun getMetaData(): ResultSetMetaData = meta()
67 | override fun meta(): AvroSchemaResultSetMetaData = AvroSchemaResultSetMetaData(schema)
68 |
69 | override fun getStatement(): Statement? = stmt
70 | }
71 |
72 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/filter.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import java.sql.ResultSet
4 |
5 | fun ResultSet.filter(f: (ResultSet) -> Boolean): ResultSet {
6 | val outer = this
7 | return object : ResultSet by this {
8 | override tailrec fun next(): Boolean {
9 | if (!outer.next())
10 | return false
11 | if (f(this))
12 | return true
13 | return next()
14 | }
15 | }
16 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/resultset/map.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.lenses.jdbc4.row.Row
4 | import org.apache.avro.Schema
5 | import java.sql.ResultSetMetaData
6 | import java.sql.Statement
7 |
8 | fun RowResultSet.map(schema: Schema, f: (Row) -> Row): RowResultSet {
9 |
10 | val self = this
11 |
12 | return object : StreamingRowResultSet() {
13 |
14 | override fun currentRow(): Row = f(self.currentRow())
15 | override fun getRow(): Int = self.row
16 |
17 | override fun next(): Boolean = self.next()
18 | override fun getStatement(): Statement = self.statement
19 |
20 | override fun close(): Unit = self.close()
21 | override fun isClosed(): Boolean = self.isClosed
22 |
23 | override fun meta(): ResultSetMetaData = AvroSchemaResultSetMetaData(schema)
24 | override fun getMetaData(): ResultSetMetaData = meta()
25 | }
26 | }
--------------------------------------------------------------------------------
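The map extension re-shapes each row and swaps in the metadata of a target schema; the mappers defined earlier in this driver (for example ShowTablesMapper) are plausibly applied through exactly this kind of transformation. A sketch, with schemas and data invented for illustration:

    import io.lenses.jdbc4.resultset.ListResultSet
    import io.lenses.jdbc4.resultset.map
    import io.lenses.jdbc4.row.ListRow
    import org.apache.avro.SchemaBuilder

    fun main() {
      val source = SchemaBuilder.record("user").fields()
          .requiredString("name")
          .requiredInt("age")
          .endRecord()
      val target = SchemaBuilder.record("name_only").fields()
          .requiredString("name")
          .endRecord()

      val rs = ListResultSet(null, source, listOf(ListRow(listOf("alice", 31))))

      // project the two-column source down to a single upper-cased column
      val mapped = rs.map(target) { row -> ListRow(listOf(row.getString(1)?.toUpperCase())) }

      mapped.next()
      println(mapped.getString("name")) // ALICE, resolved via the target schema's metadata
    }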
/src/main/kotlin/io/lenses/jdbc4/row/ConvertingRow.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.row
2 |
3 | import java.io.Reader
4 | import java.io.StringReader
5 | import java.math.BigDecimal
6 | import java.sql.RowId
7 | import java.sql.SQLException
8 | import java.sql.Time
9 | import java.sql.Timestamp
10 | import java.time.Instant
11 | import java.time.ZoneId
12 | import java.util.*
13 |
14 | /**
15 | * An implementation of [Row] that will attempt to convert its values
16 | * to the required data type.
17 | */
18 | abstract class ConvertingRow : Row {
19 |
20 | override fun getByte(index: Int): Byte {
21 | return when (val value = getObject(index)) {
22 | is Int -> value.toByte()
23 | is Long -> value.toByte()
24 | is String -> value.toByte()
25 | else -> throw SQLException("Unable to convert $value to byte")
26 | }
27 | }
28 |
29 | override fun charStream(index: Int): Reader {
30 | return when (val value = getObject(index)) {
31 | is String -> StringReader(value)
32 | else -> throw SQLException("Unable to convert $value to Reader")
33 | }
34 | }
35 |
36 | override fun getFloat(index: Int): Float {
37 | return when (val value = getObject(index)) {
38 | is Double -> value.toFloat()
39 | is Float -> value
40 | is String -> value.toFloat()
41 | else -> throw SQLException("Unable to convert $value to float")
42 | }
43 | }
44 |
45 | override fun getBoolean(index: Int): Boolean {
46 | return when (val value = getObject(index)) {
47 | is Boolean -> value
48 | is String -> value == "true"
49 | else -> throw SQLException("Unable to convert $value to boolean")
50 | }
51 | }
52 |
53 | override fun getBigDecimal(index: Int, scale: Int): BigDecimal {
54 | return when (val value = getObject(index)) {
55 | is Long -> BigDecimal.valueOf(value, scale)
56 | is Int -> BigDecimal.valueOf(value.toLong(), scale)
57 | is String -> BigDecimal(value)
58 | else -> throw SQLException("Unable to convert $value to BigDecimal")
59 | }
60 | }
61 |
62 | override fun getBigDecimal(index: Int): BigDecimal {
63 | return when (val value = getObject(index)) {
64 | is Long -> BigDecimal.valueOf(value)
65 | is Int -> BigDecimal.valueOf(value.toLong())
66 | is Double -> BigDecimal.valueOf(value)
67 | is Float -> BigDecimal.valueOf(value.toDouble())
68 | is String -> BigDecimal(value)
69 | else -> throw SQLException("Unable to convert $value to BigDecimal")
70 | }
71 | }
72 |
73 | override fun getTime(index: Int): Time = getTime(index, null)
74 | override fun getTime(index: Int, cal: Calendar?): Time {
75 | val instant = when (val value = getObject(index)) {
76 | is Int -> Instant.ofEpochMilli(value.toLong())
77 | is Long -> Instant.ofEpochMilli(value)
78 | is String -> Instant.ofEpochMilli(value.toLong())
79 | else -> throw SQLException("Unable to convert $value to Time")
80 | }
81 | val zone = cal?.timeZone?.toZoneId() ?: ZoneId.of("Z")
82 | return Time.valueOf(instant.atZone(zone).toLocalTime())
83 | }
84 |
85 | override fun getDate(index: Int): java.sql.Date = getDate(index, null)
86 | override fun getDate(index: Int, cal: Calendar?): java.sql.Date {
87 | val instant = when (val value = getObject(index)) {
88 | is Int -> Instant.ofEpochMilli(value.toLong())
89 | is Long -> Instant.ofEpochMilli(value)
90 | is String -> Instant.ofEpochMilli(value.toLong())
91 | else -> throw SQLException("Unable to convert $value to java.sql.Date")
92 | }
93 | val zone = cal?.timeZone?.toZoneId() ?: ZoneId.of("Z")
94 | return java.sql.Date(instant.atZone(zone).toInstant().toEpochMilli())
95 | }
96 |
97 | override fun getTimestamp(index: Int): Timestamp = getTimestamp(index, null)
98 | override fun getTimestamp(index: Int, cal: Calendar?): Timestamp {
99 | val instant = when (val value = getObject(index)) {
100 | is Int -> Instant.ofEpochMilli(value.toLong())
101 | is Long -> Instant.ofEpochMilli(value)
102 | is String -> Instant.ofEpochMilli(value.toLong())
103 | else -> throw SQLException("Unable to convert $value to Timestamp")
104 | }
105 | val zone = cal?.timeZone?.toZoneId() ?: ZoneId.of("Z")
106 | return Timestamp.valueOf(instant.atZone(zone).toLocalDateTime())
107 | }
108 |
109 | override fun getBytes(index: Int): ByteArray {
110 | return when (val value = getObject(index)) {
111 | is ByteArray -> value
112 | else -> throw SQLException("Unable to convert $value to byte[]")
113 | }
114 | }
115 |
116 | override fun getDouble(index: Int): Double {
117 | return when (val value = getObject(index)) {
118 | is Double -> value
119 | is Float -> value.toDouble()
120 | is Int -> value.toDouble()
121 | is Long -> value.toDouble()
122 | is String -> value.toDouble()
123 | else -> throw SQLException("Unable to convert $value to double")
124 | }
125 | }
126 |
127 | override fun getString(index: Int): String? {
128 | val value = getObject(index)
129 | return value?.toString()
130 | }
131 |
132 | override fun getLong(index: Int): Long {
133 | return when (val value = getObject(index)) {
134 | is Int -> value.toLong()
135 | is Long -> value
136 | is String -> value.toLong()
137 | else -> throw SQLException("Unable to convert $value to String")
138 | }
139 | }
140 |
141 | override fun getInt(index: Int): Int {
142 | return when (val value = getObject(index)) {
143 | is Int -> value
144 | is Long -> value.toInt()
145 | is String -> value.toInt()
146 | else -> throw SQLException("Unable to convert $value to int")
147 | }
148 | }
149 |
150 | override fun getShort(index: Int): Short {
151 | return when (val value = getObject(index)) {
152 | is Int -> value.toShort()
153 | is Long -> value.toShort()
154 | is String -> value.toShort()
155 | else -> throw SQLException("Unable to convert $value to short")
156 | }
157 | }
158 |
159 | override fun getRowId(index: Int): RowId = LongRowId(index.toLong())
160 | }
--------------------------------------------------------------------------------
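Since ListRow (defined in rows.kt below) extends ConvertingRow, values are coerced on access rather than at construction time. A sketch with invented data:

    import io.lenses.jdbc4.row.ListRow

    fun main() {
      val row = ListRow(listOf("42", 1546300800000L, 3.14))

      println(row.getInt(1))       // 42, String -> Int
      println(row.getLong(1))      // 42, String -> Long
      println(row.getTimestamp(2)) // 2019-01-01 00:00:00.0, epoch millis -> Timestamp (UTC)
      println(row.getFloat(3))     // 3.14, Double -> Float
    }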
/src/main/kotlin/io/lenses/jdbc4/row/LongRowId.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.row
2 |
3 | import java.sql.RowId
4 |
5 | class LongRowId(val id: Long) : RowId {
6 | override fun getBytes(): ByteArray = id.toString().toByteArray()
7 | }
8 |
9 | /**
10 | * A [RowId] that wraps the offset field returned in Kafka.
11 | */
12 | class OffsetRowId(val id: Long) : RowId {
13 | override fun getBytes(): ByteArray = id.toString().toByteArray()
14 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/row/Row.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.row
2 |
3 | import java.io.Reader
4 | import java.math.BigDecimal
5 | import java.sql.RowId
6 | import java.sql.Time
7 | import java.sql.Timestamp
8 | import java.util.*
9 |
10 | interface Row {
11 |
12 | fun getObject(index: Int): Any?
13 |
14 | fun getRowId(index: Int): RowId
15 |
16 | fun charStream(index: Int): Reader?
17 |
18 | fun getBigDecimal(index: Int, scale: Int): BigDecimal?
19 | fun getBigDecimal(index: Int): BigDecimal?
20 | fun getBoolean(index: Int): Boolean
21 | fun getByte(index: Int): Byte
22 | fun getBytes(index: Int): ByteArray?
23 |
24 | fun getDate(index: Int): java.sql.Date?
25 | fun getDate(index: Int, cal: Calendar?): java.sql.Date?
26 |
27 | fun getFloat(index: Int): Float
28 |
29 | fun getInt(index: Int): Int
30 |
31 | fun getTime(index: Int): Time?
32 | fun getTime(index: Int, cal: Calendar?): Time?
33 |
34 | fun getLong(index: Int): Long
35 |
36 | fun getTimestamp(index: Int): Timestamp?
37 | fun getTimestamp(index: Int, cal: Calendar?): Timestamp?
38 |
39 | fun getDouble(index: Int): Double
40 |
41 | // fun indexOf(alias: String): Int
42 | // fun getString(alias: String): String? = getString(indexOf(alias))
43 | fun getString(index: Int): String?
44 |
45 | fun getShort(index: Int): Short
46 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/row/rows.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.row
2 |
3 | import org.apache.avro.generic.GenericData
4 | import java.sql.SQLException
5 |
6 | /**
7 | * An implementation of [Row] that uses a static list of values
8 | * provided at construction time.
9 | */
10 | class ListRow(private val values: List<Any?>) : ConvertingRow() {
11 | override fun getObject(index: Int): Any? = try {
12 | values[index - 1]
13 | } catch (ex: IndexOutOfBoundsException) {
14 | throw SQLException("Column index out of bounds $index")
15 | }
16 | }
17 |
18 | class PairRow(private val values: List<Pair<String, Any?>>) : ConvertingRow() {
19 | override fun getObject(index: Int): Any? = try {
20 | values[index - 1].second
21 | } catch (ex: IndexOutOfBoundsException) {
22 | throw SQLException("Column index out of bounds $index")
23 | }
24 | }
25 |
26 | class RecordRow(val record: GenericData.Record) : ConvertingRow() {
27 | override fun getObject(index: Int): Any? {
28 | return record.get(index - 1)
29 | }
30 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/statements/LStatement.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.statements
2 |
3 | import arrow.core.getOrHandle
4 | import io.lenses.jdbc4.client.LensesClient
5 | import io.lenses.jdbc4.resultset.emptyResultSet
6 | import kotlinx.coroutines.runBlocking
7 | import java.sql.Connection
8 | import java.sql.ResultSet
9 | import java.sql.SQLException
10 |
11 | open class LStatement(private val conn: Connection,
12 | private val client: LensesClient) : DefaultStatement,
13 | AutoCloseable,
14 | IWrapperStatement,
15 | ReadOnlyStatement,
16 | OfflineStatement {
17 |
18 | // the last resultset retrieved by this statement
19 | private var rs: ResultSet = emptyResultSet
20 |
21 | /**
22 | * Executes the given SQL statement, which returns a single
23 | * [ResultSet] object.
24 | *
25 | * @param sql an SQL statement to be sent to the database, typically a
26 | * static SQL SELECT statement
27 | * @return a [ResultSet] object that contains the data produced
28 | * by the given query; never null
29 | */
30 | override fun executeQuery(sql: String): ResultSet = runBlocking {
31 | rs = client.execute(sql)
32 | .getOrHandle { throw SQLException("Could not execute query: $it", it.cause) }
33 | rs
34 | }
35 |
36 | override fun executeUpdate(sql: String): Int = runBlocking {
37 | executeQuery(sql)
38 | 0
39 | }
40 |
41 | /**
42 | * @return true if the first result is a [ResultSet]
43 | * object; false if it is an update count or there are
44 | * no results
45 | */
46 | override fun execute(sql: String): Boolean {
47 | executeQuery(sql)
48 | return true
49 | }
50 |
51 | override fun getConnection(): Connection = conn
52 | override fun getResultSet(): ResultSet = rs
53 |
54 | override fun getQueryTimeout(): Int = 0 // client.connectionRequestTimeout()
55 | override fun setQueryTimeout(seconds: Int) = throw UnsupportedOperationException()
56 | }
--------------------------------------------------------------------------------
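From an application's point of view the statement is driven through the standard JDBC API. The snippet below is a sketch only: the JDBC URL format, credentials and topic name are assumptions made for illustration and should be taken from the project README rather than from here.

    import java.sql.DriverManager

    fun main() {
      // may be unnecessary if the driver registers itself via ServiceLoader
      Class.forName("io.lenses.jdbc4.LensesDriver")

      // hypothetical connection details
      val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
      conn.createStatement().use { stmt ->
        val rs = stmt.executeQuery("SELECT * FROM mytopic LIMIT 10")
        while (rs.next()) {
          println(rs.getString(1))
        }
      }
    }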
/src/main/kotlin/io/lenses/jdbc4/statements/ReadOnlyStatement.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.statements
2 |
3 | import java.sql.SQLFeatureNotSupportedException
4 | import java.sql.Statement
5 |
6 | // Extends [Statement], implementing the methods that are not supported by read-only statements.
7 | interface ReadOnlyStatement : Statement {
8 |
9 | override fun execute(sql: String?,
10 | autoGeneratedKeys: Int): Boolean = throw SQLFeatureNotSupportedException("Auto generated keys are not supported by Lenses")
11 |
12 | override fun execute(sql: String?,
13 | columnIndexes: IntArray?): Boolean = throw SQLFeatureNotSupportedException("Auto generated keys are not supported by Lenses")
14 |
15 | override fun execute(sql: String?,
16 |                        columnNames: Array<out String>?): Boolean = throw SQLFeatureNotSupportedException("Auto generated keys are not supported by Lenses")
17 |
18 | override fun addBatch(sql: String?) = throw SQLFeatureNotSupportedException()
19 | override fun executeUpdate(sql: String?, autoGeneratedKeys: Int): Int = throw SQLFeatureNotSupportedException()
20 | override fun executeUpdate(sql: String?, columnIndexes: IntArray?): Int = throw SQLFeatureNotSupportedException()
21 | override fun executeUpdate(sql: String?,
22 |                              columnNames: Array<out String>?): Int = throw SQLFeatureNotSupportedException()
23 |
24 | override fun getUpdateCount(): Int = -1
25 |
26 | override fun clearBatch() = unsupported()
27 | override fun executeBatch(): IntArray = unsupported()
28 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/statements/SelectPreparedStatement.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4.statements
2 | //
3 | //import io.lenses.jdbc4.client.domain.StreamingSelectResult
4 | //import io.lenses.jdbc4.resultset.EmptyResultSetMetaData
5 | //import io.lenses.jdbc4.resultset.emptyResultSet
6 | //import io.lenses.jdbc4.util.Logging
7 | //import java.sql.Connection
8 | //import java.sql.ParameterMetaData
9 | //import java.sql.ResultSet
10 | //import java.sql.ResultSetMetaData
11 | //import java.sql.SQLFeatureNotSupportedException
12 | //import java.util.concurrent.TimeUnit
13 | //
14 | //class SelectPreparedStatement(private val conn: Connection,
15 | // private val sql: String) : AbstractPreparedStatement,
16 | // ReadOnlyPreparedStatement,
17 | // UnsupportedTypesPreparedStatement,
18 | // Logging {
19 | //
20 | // override fun getResultSet(): ResultSet = TODO()
21 | //
22 | // override fun getConnection(): Connection = conn
23 | //
24 | // override fun getQueryTimeout(): Int = 0 // client.connectionRequestTimeout()
25 | // override fun setQueryTimeout(seconds: Int) = throw UnsupportedOperationException()
26 | //
27 | // override fun executeUpdate(): Int = throw SQLFeatureNotSupportedException("Cannot call updated on a select query")
28 | //
29 | // // the last resultset generated by this statement
30 | // private var rs: ResultSet = emptyResultSet
31 | //
32 | // /**
33 | // * Clears the current parameter values immediately.
34 | // * That is, the current record that is being "built" will be reset to empty.
35 | // */
36 | // override fun clearParameters() = throw SQLFeatureNotSupportedException()
37 | //
38 | // override fun execute(): Boolean {
39 | // // in this execute method we must block until we are completed
40 | // // or we receive a record, otherwise we don't know if we can return true or false
41 | // val result = select(sql)
42 | // // todo rs = StreamingRowResultSet(this, result)
43 | // return result.hasData(1, TimeUnit.DAYS)
44 | // }
45 | //
46 | // private fun select(sql: String): StreamingSelectResult {
47 | // //val result = client.select(sql)
48 | // // todo rs = StreamingRowResultSet(this, result)
49 | // return TODO()
50 | // }
51 | //
52 | // override fun executeQuery(): ResultSet {
53 | // select(sql)
54 | // return rs
55 | // }
56 | //
57 | //// -- meta data methods
58 | //
59 | // /**
60 | // * @return an empty result set because we do not yet support prepared statements for queries
61 | // */
62 | // override fun getMetaData(): ResultSetMetaData = EmptyResultSetMetaData
63 | //
64 | // override fun getParameterMetaData(): ParameterMetaData = throw SQLFeatureNotSupportedException()
65 | //
66 | //
67 | //// == auto generated keys are not supported by kafka/lenses ==
68 | //
69 | // override fun getGeneratedKeys(): ResultSet = throw SQLFeatureNotSupportedException("Auto generated keys are not supported by Lenses")
70 | //}
71 | //
72 |
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/statements/statements.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.statements
2 |
3 | import io.lenses.jdbc4.IWrapper
4 | import io.lenses.jdbc4.util.Logging
5 | import java.io.InputStream
6 | import java.io.Reader
7 | import java.math.BigDecimal
8 | import java.net.URL
9 | import java.sql.Blob
10 | import java.sql.Clob
11 | import java.sql.Date
12 | import java.sql.NClob
13 | import java.sql.PreparedStatement
14 | import java.sql.Ref
15 | import java.sql.ResultSet
16 | import java.sql.RowId
17 | import java.sql.SQLFeatureNotSupportedException
18 | import java.sql.SQLWarning
19 | import java.sql.SQLXML
20 | import java.sql.Statement
21 | import java.sql.Time
22 | import java.sql.Timestamp
23 | import java.util.*
24 |
25 | typealias Unsupported = SQLFeatureNotSupportedException
26 |
27 | fun unsupported(): Nothing = throw SQLFeatureNotSupportedException()
28 |
29 | /**
30 | * Overrides functions in [Statement] that close a stream.
31 | * With HTTP implementations there is nothing to close, so these are noops.
32 | */
33 | interface OfflineStatement : Statement, Logging {
34 |
35 | // -- Lenses statements are offline, and so there's nothing to close
36 |
37 | override fun isCloseOnCompletion(): Boolean = true
38 | override fun close() {} // lsql-statements have no resources associated
39 | override fun isClosed(): Boolean = true
40 | override fun closeOnCompletion() {}
41 | override fun cancel() {}
42 | }
43 |
44 | interface IWrapperStatement : Statement, IWrapper {
45 | override fun isWrapperFor(iface: Class<*>?): Boolean = _isWrapperFor(iface)
46 |   override fun <T> unwrap(iface: Class<T>): T = _unwrap(iface)
47 | }
48 |
49 | /**
50 | * Sets up some default values for all Lenses [Statement] instances.
51 | */
52 | interface DefaultStatement : Statement {
53 |
54 | override fun clearWarnings() {}
55 | override fun getWarnings(): SQLWarning = SQLWarning()
56 |
57 | override fun getMoreResults(): Boolean = false
58 | override fun getMoreResults(current: Int): Boolean = false
59 |
60 | // max bytes in a column, irrelevant for lenses sql
61 | override fun getMaxFieldSize(): Int = -1
62 |
63 | override fun setMaxFieldSize(max: Int) {}
64 |
65 | // equivalent to a limit
66 | override fun getMaxRows(): Int = -1
67 |
68 | override fun setMaxRows(max: Int) {}
69 |
70 | override fun getFetchSize(): Int = -1
71 | override fun setFetchSize(rows: Int) {}
72 | override fun getFetchDirection(): Int = ResultSet.FETCH_FORWARD
73 | override fun setFetchDirection(direction: Int) {
74 | if (direction != ResultSet.FETCH_FORWARD)
75 | throw SQLFeatureNotSupportedException("Lenses ResultSets can only be ResultSet.FETCH_FORWARD")
76 | }
77 |
78 | override fun setPoolable(poolable: Boolean) {
79 | }
80 |
81 | override fun getResultSetHoldability(): Int = ResultSet.CLOSE_CURSORS_AT_COMMIT
82 |
83 | override fun setCursorName(name: String?) = throw SQLFeatureNotSupportedException()
84 |
85 | override fun setEscapeProcessing(enable: Boolean) {}
86 | override fun isPoolable(): Boolean = false
87 |
88 | override fun getResultSetType(): Int = ResultSet.TYPE_FORWARD_ONLY
89 | override fun getResultSetConcurrency(): Int = ResultSet.CONCUR_READ_ONLY
90 |
91 | // == auto generated keys are not supported by kafka/lenses ==
92 | override fun getGeneratedKeys(): ResultSet = throw SQLFeatureNotSupportedException("Auto generated keys are not supported by Lenses")
93 | }
94 |
95 | /**
96 | * Overrides functions in [Statement] that cannot be executed on a [PreparedStatement].
97 | */
98 | interface AbstractPreparedStatement : PreparedStatement,
99 | UnsupportedTypesPreparedStatement,
100 | IWrapperStatement,
101 | DefaultStatement,
102 | OfflineStatement,
103 | Logging {
104 |
105 | private fun unsupported(): Nothing = throw Unsupported("This method cannot be called on a prepared statement")
106 |
107 | override fun addBatch(sql: String?) = unsupported()
108 |
109 | // -- execute methods that accept SQL are not used by prepared statements
110 | override fun executeQuery(sql: String): ResultSet = unsupported()
111 |
112 | override fun execute(sql: String): Boolean = unsupported()
113 | override fun execute(sql: String?, autoGeneratedKeys: Int): Boolean = unsupported()
114 |   override fun execute(sql: String?, columnNames: Array<out String>?): Boolean = unsupported()
115 | override fun execute(sql: String?, columnIndexes: IntArray?): Boolean = unsupported()
116 |
117 | override fun executeUpdate(sql: String?): Int = unsupported()
118 | override fun executeUpdate(sql: String?, autoGeneratedKeys: Int): Int = unsupported()
119 | override fun executeUpdate(sql: String?, columnIndexes: IntArray?): Int = unsupported()
120 |   override fun executeUpdate(sql: String?, columnNames: Array<out String>?): Int = unsupported()
121 |
122 | }
123 |
124 | interface ReadOnlyPreparedStatement : PreparedStatement {
125 | override fun addBatch() = unsupported()
126 | override fun clearBatch() = unsupported()
127 | override fun executeBatch(): IntArray = unsupported()
128 | override fun getUpdateCount(): Int = -1
129 | }
130 |
131 | /**
132 | * Overrides functions in [Statement] that operate on types not supported by Lenses SQL, such as [Clob].
133 | */
134 | interface UnsupportedTypesPreparedStatement : PreparedStatement {
135 |
136 | // -- methods which set values on the current record
137 |
138 | override fun setCharacterStream(parameterIndex: Int, reader: Reader?, length: Int) = unsupported()
139 | override fun setCharacterStream(parameterIndex: Int, reader: Reader?, length: Long) = unsupported()
140 | override fun setCharacterStream(parameterIndex: Int, reader: Reader?) = unsupported()
141 | override fun setDate(parameterIndex: Int, d: Date?) = unsupported()
142 | override fun setDate(parameterIndex: Int, d: Date?, cal: Calendar?) = unsupported()
143 | override fun setObject(parameterIndex: Int, x: Any?) = unsupported()
144 | override fun setLong(parameterIndex: Int, x: Long) = unsupported()
145 | override fun setNString(parameterIndex: Int, x: String?) = unsupported()
146 | override fun setURL(parameterIndex: Int, u: URL?) = unsupported()
147 | override fun setFloat(parameterIndex: Int, f: Float) = unsupported()
148 | override fun setTime(parameterIndex: Int, t: Time?) = unsupported()
149 | override fun setTime(parameterIndex: Int, x: Time?, cal: Calendar?) = unsupported()
150 | override fun setNCharacterStream(parameterIndex: Int, value: Reader?, length: Long) = unsupported()
151 | override fun setNCharacterStream(parameterIndex: Int, value: Reader?) = unsupported()
152 | override fun setInt(parameterIndex: Int, x: Int) = unsupported()
153 | override fun setDouble(parameterIndex: Int, x: Double) = unsupported()
154 | override fun setBigDecimal(parameterIndex: Int, x: BigDecimal?) = unsupported()
155 | override fun setObject(parameterIndex: Int, x: Any?, targetSqlType: Int) = unsupported()
156 | override fun setString(parameterIndex: Int, x: String?) = unsupported()
157 | override fun setNull(parameterIndex: Int, sqlType: Int) = unsupported()
158 | override fun setNull(parameterIndex: Int, sqlType: Int, typeName: String?) = unsupported()
159 | override fun setTimestamp(parameterIndex: Int, ts: Timestamp?) = unsupported()
160 | override fun setTimestamp(parameterIndex: Int, ts: Timestamp?, cal: Calendar?) = unsupported()
161 | override fun setShort(parameterIndex: Int, s: Short) = unsupported()
162 | override fun setBoolean(parameterIndex: Int, b: Boolean) = unsupported()
163 | override fun setByte(parameterIndex: Int, b: Byte) = unsupported()
164 |
165 | // -- unsupported types --
166 |
167 | override fun setBinaryStream(parameterIndex: Int, x: InputStream?, length: Int) = unsupported()
168 | override fun setBinaryStream(parameterIndex: Int, x: InputStream?, length: Long) = unsupported()
169 | override fun setBinaryStream(parameterIndex: Int, x: InputStream?) = unsupported()
170 | override fun setClob(parameterIndex: Int, x: Clob?) = unsupported()
171 | override fun setClob(parameterIndex: Int, reader: Reader?, length: Long) = unsupported()
172 | override fun setClob(parameterIndex: Int, reader: Reader?) = unsupported()
173 | override fun setUnicodeStream(parameterIndex: Int, x: InputStream?, length: Int) = unsupported()
174 | override fun setObject(parameterIndex: Int, x: Any?, targetSqlType: Int, scaleOrLength: Int) = unsupported()
175 | override fun setBytes(parameterIndex: Int, x: ByteArray?) = unsupported()
176 | override fun setSQLXML(parameterIndex: Int, xmlObject: SQLXML?) = unsupported()
177 | override fun setRef(parameterIndex: Int, x: Ref?) = unsupported()
178 | override fun setBlob(parameterIndex: Int, x: Blob?) = unsupported()
179 | override fun setBlob(parameterIndex: Int, inputStream: InputStream?, length: Long) = unsupported()
180 | override fun setBlob(parameterIndex: Int, inputStream: InputStream?) = unsupported()
181 | override fun setArray(parameterIndex: Int, x: java.sql.Array?) = unsupported()
182 | override fun setRowId(parameterIndex: Int, x: RowId?) = unsupported()
183 | override fun setAsciiStream(parameterIndex: Int, x: InputStream?, length: Int) = unsupported()
184 | override fun setAsciiStream(parameterIndex: Int, x: InputStream?, length: Long) = unsupported()
185 | override fun setAsciiStream(parameterIndex: Int, x: InputStream?) = unsupported()
186 | override fun setNClob(parameterIndex: Int, value: NClob?) = unsupported()
187 | override fun setNClob(parameterIndex: Int, reader: Reader?, length: Long) = unsupported()
188 | override fun setNClob(parameterIndex: Int, reader: Reader?) = unsupported()
189 | }
--------------------------------------------------------------------------------
/src/main/kotlin/io/lenses/jdbc4/util/Logging.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.util
2 |
3 | import org.slf4j.Logger
4 | import org.slf4j.LoggerFactory
5 |
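  | /**
  |  * Mixin that exposes an SLF4J [Logger] named after the implementing class,
  |  * so implementors can log via `logger.debug(...)` without further setup.
  |  */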
6 | interface Logging {
7 | val logger: Logger
8 | get() = LoggerFactory.getLogger(javaClass)
9 |
10 | }
--------------------------------------------------------------------------------
/src/main/resources/lsql.versions:
--------------------------------------------------------------------------------
1 | driver.major=3
2 | driver.minor=0
3 | lenses.major=3
4 | lenses.minor=0
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/BatchInsertTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import io.kotlintest.shouldBe
4 | //import io.kotlintest.shouldThrow
5 | //import io.kotlintest.specs.WordSpec
6 | //import java.sql.DriverManager
7 | //import java.sql.SQLException
8 | //
9 | //class BatchInsertTest : WordSpec(), CCData {
10 | // init {
11 | // io.lenses.jdbc4.LDriver()
12 | //
13 | // val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
14 | //
15 | // "JDBC Driver" should {
16 | // "support batched prepared statements" {
17 | //
18 | // val batchSize = 20
19 | // val values = Array(batchSize, { _ -> generateCC() })
20 | // val sql = "SET _ktype='STRING'; SET _vtype='AVRO';INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
21 | // val stmt = conn.prepareStatement(sql)
22 | //
23 | // for (value in values) {
24 | // stmt.setString(1, value.firstname)
25 | // stmt.setString(2, value.number)
26 | // stmt.setString(3, value.currency)
27 | // stmt.setString(4, value.surname)
28 | // stmt.setString(5, value.country)
29 | // stmt.setBoolean(6, value.blocked)
30 | // stmt.addBatch()
31 | // }
32 | //
33 | // val result = stmt.executeBatch()
34 | // result.size shouldBe batchSize
35 | // result.toSet() shouldBe setOf(0)
36 | // stmt.clearBatch()
37 | //
38 | // // now we must check that our values have been inserted
39 | // for (value in values) {
40 | // val rs = conn.createStatement().executeQuery("SELECT * FROM cc_data WHERE _ktype='STRING' AND _vtype='AVRO' AND customerLastName='${value.surname}' and customerFirstName='${value.firstname}' AND number='${value.number}'")
41 | // rs.next()
42 | // rs.getString("customerFirstName") shouldBe value.firstname
43 | // rs.getString("customerLastName") shouldBe value.surname
44 | // rs.getString("number") shouldBe value.number
45 | // rs.getString("currency") shouldBe value.currency
46 | // rs.getString("country") shouldBe value.country
47 | // rs.getBoolean("blocked") shouldBe value.blocked
48 | // }
49 | // }
50 | // "throw exception if batch size exceeded" {
51 | // val sql = "INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
52 | // val stmt = conn.prepareStatement(sql)
53 | // fun add() {
54 | // stmt.setString(1, "a")
55 | // stmt.setString(2, "123")
56 | // stmt.setString(3, "GBP")
57 | // stmt.setString(4, "b")
58 | // stmt.setString(5, "UK")
59 | // stmt.setBoolean(6, false)
60 | // stmt.addBatch()
61 | // }
62 | // for (k in 1..io.lenses.jdbc4.Constants.BATCH_HARD_LIMIT) {
63 | // add()
64 | // }
65 | // // the next element should exceed the batch limit and throw an exception
66 | //        shouldThrow<SQLException> {
67 | // add()
68 | // }
69 | // }
70 | // }
71 | // }
72 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/BatchNestedInsertStressTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import io.kotlintest.shouldBe
4 | //import io.kotlintest.specs.WordSpec
5 | //import java.sql.DriverManager
6 | //import java.util.*
7 | //
8 | //class BatchNestedInsertStressTest : WordSpec(), LocationData {
9 | //
10 | // override val random: Random = Random()
11 | //
12 | // init {
13 | // io.lenses.jdbc4.LDriver()
14 | //
15 | // val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
16 | //
17 | // "JDBC Driver" should {
18 | // "support batched prepared statements" {
19 | //
20 | // val batchSize = 100
21 | // val count = 10000
22 | //      val locations: Array<LocationData.Location> = Array(count, { _ -> randomLocation() })
23 | // logger.debug("Generated $count locations")
24 | //
25 | // val topic = newTopicName()
26 | // registerValueSchema(topic, schema())
27 | // createTopic(topic)
28 | //
29 | //
30 | // val sql = "SET _ktype='STRING'; SET _vtype='AVRO';INSERT INTO `$topic` (id, address.street, address.number, address.zip,address.state, geo.lat, geo.lon) values (?,?,?,?,?,?,?)"
31 | // val stmt = conn.prepareStatement(sql)
32 | //
33 | // locations.asList().chunked(batchSize).forEach { batch ->
34 | // for (location in batch) {
35 | // stmt.setLong(1, location.id)
36 | // stmt.setString(2, location.address.street)
37 | // stmt.setInt(3, location.address.number)
38 | // stmt.setInt(4, location.address.zip)
39 | //          stmt.setString(5, location.address.state)
40 | // stmt.setDouble(6, location.geo.lat)
41 | // stmt.setDouble(7, location.geo.lon)
42 | // stmt.addBatch()
43 | // }
44 | // val result = stmt.executeBatch()
45 | // logger.debug("Executed batch")
46 | // result.size shouldBe batchSize
47 | // result.toSet() shouldBe setOf(0)
48 | // stmt.clearBatch()
49 | // }
50 | //
51 | // // now we must check that our values have been inserted
52 | // for (location in arrayOf(locations.first(), locations.last())) {
53 | // val rs = conn.createStatement().executeQuery("SELECT * FROM $topic WHERE _ktype='STRING' AND _vtype='AVRO' AND geo.lat=${location.geo.lat} and geo.lon=${location.geo.lon} AND `address`.`zip`=${location.address.zip}")
54 | // rs.next()
55 | // rs.getLong("id") shouldBe location.id
56 | // /*rs.getString("address.street") shouldBe location.address.street
57 | // rs.getInt("address.number") shouldBe location.address.number
58 | // rs.getInt("address.zip") shouldBe location.address.zip
59 | // rs.getString("address.state") shouldBe location.address.state
60 | // rs.getDouble("geo.lat") shouldBe location.geo.lat
61 | // rs.getDouble("geo.lon") shouldBe location.geo.lon
62 | // */
63 | // }
64 | // }
65 | // }
66 | // }
67 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/FrameToRecordTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import arrow.core.getOrHandle
4 | import io.kotlintest.fail
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.shouldThrow
7 | import io.kotlintest.specs.FunSpec
8 | import io.lenses.jdbc4.client.frameToRecord
9 | import org.apache.avro.SchemaBuilder
10 | import java.sql.SQLException
11 |
12 | class FrameToRecordTest : FunSpec() {
13 | init {
14 | test("frame to record for flat schema with all values") {
15 | val schema = SchemaBuilder.builder().record("foo")
16 | .fields()
17 | .optionalString("a")
18 | .requiredBoolean("b")
19 | .optionalInt("c")
20 | .endRecord()
21 | val json = """{ "data" : { "value": { "a":"hello", "b":true, "c": 123 } } }"""
22 | val row = frameToRecord(json, schema).getOrHandle { fail(it.toString()) }
23 | row.getObject(1) shouldBe "hello"
24 | row.getObject(2) shouldBe true
25 | row.getObject(3) shouldBe 123
26 |
27 |       shouldThrow<SQLException> { row.getObject(4) }
28 | }
29 |
30 | test("frame to record for flat schema with missing values") {
31 | val schema = SchemaBuilder.builder().record("foo")
32 | .fields()
33 | .optionalString("a")
34 | .requiredBoolean("b")
35 | .optionalInt("c")
36 | .endRecord()
37 | val json = """{ "data" : { "value": { "a":"hello", "c": 123 } } }"""
38 | val row = frameToRecord(json, schema).getOrHandle { fail(it.toString()) }
39 | row.getObject(1) shouldBe "hello"
40 | row.getObject(2) shouldBe null
41 | row.getObject(3) shouldBe 123
42 |
43 |       shouldThrow<SQLException> { row.getObject(4) }
44 | }
45 |
46 | test("frame to record for nested schema") {
47 |
48 | val b = SchemaBuilder.builder().record("b")
49 | .fields()
50 | .optionalString("x")
51 | .requiredLong("y")
52 | .endRecord()
53 |
54 | val schema = SchemaBuilder.builder().record("foo")
55 | .fields()
56 | .optionalString("a")
57 | .name("b").type(b).noDefault()
58 | .optionalInt("c")
59 | .endRecord()
60 |
61 | val json = """{ "data" : { "value": {"a": "hello", "b": { "x": "world", "y": 999 }, "c": 123} } }"""
62 | val row = frameToRecord(json, schema).getOrHandle { fail(it.toString()) }
63 | row.getObject(1) shouldBe "hello"
64 | row.getObject(2) shouldBe "world"
65 | row.getObject(3) shouldBe 999
66 | row.getObject(4) shouldBe 123
67 |
68 |       shouldThrow<SQLException> { row.getObject(5) }
69 | }
70 |
71 | test("frame to record for nested schema with missing values") {
72 |
73 | val b = SchemaBuilder.builder().record("b")
74 | .fields()
75 | .optionalString("x")
76 | .requiredLong("y")
77 | .endRecord()
78 |
79 | val schema = SchemaBuilder.builder().record("foo")
80 | .fields()
81 | .optionalString("a")
82 | .name("b").type(b).noDefault()
83 | .optionalInt("c")
84 | .endRecord()
85 |
86 | val json = """{ "data" : { "value": {"a": "hello", "b": { "x": "world" }} } }"""
87 | val row = frameToRecord(json, schema).getOrHandle { fail(it.toString()) }
88 | row.getObject(1) shouldBe "hello"
89 | row.getObject(2) shouldBe "world"
90 | row.getObject(3) shouldBe null
91 | row.getObject(4) shouldBe null
92 |
93 |       shouldThrow<SQLException> { row.getObject(5) }
94 | }
95 | }
96 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/JdbcRequestMessageTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import arrow.core.None
4 | import arrow.core.Some
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.specs.WordSpec
7 | import io.lenses.jdbc4.client.JdbcRequestMessage
8 |
9 | class JdbcRequestMessageTest : WordSpec() {
10 | init {
11 | "JdbcRequestMessage" should {
12 | "convert to and from json" {
13 | val msg = JdbcRequestMessage("SELECT * FROM abc", "token1")
14 | val json = JacksonSupport.toJson(msg)
15 |         val actual = JacksonSupport.fromJson<JdbcRequestMessage>(json)
16 | actual shouldBe msg
17 | }
18 | }
19 | }
20 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/LDatabaseMetaDataTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.matchers.collections.shouldContainAll
5 | import io.kotlintest.matchers.collections.shouldHaveSize
6 | import io.kotlintest.matchers.collections.shouldNotContain
7 | import io.kotlintest.matchers.gte
8 | import io.kotlintest.shouldBe
9 | import io.kotlintest.specs.WordSpec
10 | import io.lenses.jdbc4.resultset.resultSetList
11 | import io.lenses.jdbc4.resultset.toList
12 | import org.apache.avro.SchemaBuilder
13 | import org.apache.avro.generic.GenericData
14 | import org.apache.kafka.clients.producer.KafkaProducer
15 | import org.apache.kafka.clients.producer.ProducerRecord
16 | import java.sql.Connection
17 | import java.sql.DatabaseMetaData
18 |
19 | class LDatabaseMetaDataTest : WordSpec(), ProducerSetup {
20 |
21 | init {
22 |
23 | LensesDriver()
24 |
25 | val conn = conn()
26 | val topic1 = newTopicName()
27 | val topic2 = newTopicName()
28 |
29 | val taxiTopic = createTaxiTopic(conn)
30 |
31 | conn.createStatement().executeQuery("""
32 | CREATE TABLE $topic1 (_key int, id int, name string, quantity int, price double) FORMAT(INT, Avro) properties(partitions=3);
33 | CREATE TABLE $topic2 (_key int, id int, name string, quantity int, price double) FORMAT(INT, Json) properties(partitions=4);
34 | """.trimIndent())
35 |
36 |
37 | "LsqlDatabaseMetaDataTest" should {
38 | "declare support for multiple result sets" {
39 | conn.metaData.supportsMultipleResultSets() shouldBe true
40 | conn.metaData.supportsMultipleOpenResults() shouldBe false
41 | conn.metaData.supportsMultipleTransactions() shouldBe false
42 | }
43 | "declare support for joins" {
44 | conn.metaData.supportsFullOuterJoins() shouldBe false
45 | conn.metaData.supportsLimitedOuterJoins() shouldBe false
46 | conn.metaData.supportsOuterJoins() shouldBe false
47 | }
48 | "declare support for subqueries" {
49 | conn.metaData.supportsSubqueriesInIns() shouldBe true
50 | conn.metaData.supportsCorrelatedSubqueries() shouldBe false
51 | conn.metaData.supportsSubqueriesInComparisons() shouldBe false
52 | conn.metaData.supportsSubqueriesInExists() shouldBe false
53 | conn.metaData.supportsSubqueriesInQuantifieds() shouldBe false
54 | }
55 | "declare support for transactions" {
56 | conn.metaData.supportsTransactions() shouldBe false
57 | conn.metaData.supportsMultipleTransactions() shouldBe false
58 | conn.metaData.dataDefinitionIgnoredInTransactions() shouldBe false
59 | conn.metaData.defaultTransactionIsolation shouldBe Connection.TRANSACTION_NONE
60 | conn.metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED) shouldBe false
61 | conn.metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ) shouldBe false
62 | conn.metaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_SERIALIZABLE) shouldBe false
63 | conn.metaData.supportsDataDefinitionAndDataManipulationTransactions() shouldBe false
64 | conn.metaData.supportsDataManipulationTransactionsOnly() shouldBe false
65 | }
66 | "return type info" {
67 |
68 | val string = resultSetList(conn.metaData.typeInfo).first { it[0] == "STRING" }
69 | string[1] shouldBe java.sql.Types.VARCHAR
70 | string[6] shouldBe DatabaseMetaData.typeNullable
71 | string[3] shouldBe '"'
72 | string[4] shouldBe '"'
73 |
74 | val long = resultSetList(conn.metaData.typeInfo).first { it[0] == "LONG" }
75 | long[1] shouldBe java.sql.Types.BIGINT
76 | long[6] shouldBe DatabaseMetaData.typeNullable
77 | long[3] shouldBe null
78 | long[4] shouldBe null
79 | }
80 | "return table types" {
81 | resultSetList(conn.metaData.tableTypes).map { it[0] } shouldBe listOf("SYSTEM", "USER")
82 | }
83 | "return all table names" {
84 | val tableNames = resultSetList(conn.metaData.getTables(null, null, null, null)).map { it[2] }
85 | tableNames.shouldContainAll(topic1, topic2)
86 | }
87 | "support table types when listing tables" {
88 | val tableNames = resultSetList(conn.metaData.getTables(null,
89 | null,
90 | null,
91 | arrayOf("USER"))).map { it[2].toString() }
92 | tableNames.shouldContain(topic1)
93 | tableNames.shouldContain(topic2)
94 | tableNames.shouldNotContain("__consumer_offsets")
95 |
96 | val systemTableNames = resultSetList(conn.metaData.getTables(null,
97 | null,
98 | null,
99 | arrayOf("SYSTEM"))).map { it[2].toString() }
100 | systemTableNames.shouldContain("__consumer_offsets")
101 | systemTableNames.shouldNotContain(topic1)
102 | systemTableNames.shouldNotContain(topic2)
103 | }
104 | "support table regex when listing tables" {
105 | // lets add some of our own tables and make sure they appear in the list of all
106 | val schema = SchemaBuilder.record("wibble").fields().requiredString("foo").endRecord()
107 |         val producer = KafkaProducer<String, GenericData.Record>(producerProps())
108 | val record = GenericData.Record(schema)
109 | record.put("foo", "a")
110 |
111 | producer.send(ProducerRecord("topicregex_dibble", "key1", record))
112 | producer.send(ProducerRecord("topicregex_dobble", "key1", record))
113 | producer.send(ProducerRecord("topicregex_dubble", "key1", record))
114 | producer.close()
115 |
116 | val tableNames = resultSetList(conn.metaData.getTables(null,
117 | null,
118 | "topicregex_d%",
119 | null)).map { it[2].toString() }
120 | tableNames.size shouldBe 3
121 | tableNames.shouldContainAll("topicregex_dibble", "topicregex_dobble", "topicregex_dubble")
122 | }
123 | "support listing columns with correct types" {
124 | val columns = conn.metaData.getColumns(null, null, null, null).toList()
125 | val currency = columns.filter { it[2] == taxiTopic }.first { it[3] == "VendorID" }
126 | currency[4] shouldBe java.sql.Types.OTHER
127 | currency[5] shouldBe "INT"
128 |
129 | val merchantId = columns.filter { it[2] == taxiTopic }.first { it[3] == "tpep_pickup_datetime" }
130 | merchantId[4] shouldBe java.sql.Types.OTHER
131 | merchantId[5] shouldBe "STRING"
132 |
133 | val blocked = columns.filter { it[2] == taxiTopic }.first { it[3] == "trip_distance" }
134 | blocked[4] shouldBe java.sql.Types.OTHER
135 | blocked[5] shouldBe "DOUBLE"
136 | }
137 | "support listing columns using table regex" {
138 | val columns = conn.metaData.getColumns(null, null, taxiTopic, null).toList()
139 | val currency = columns.filter { it[2] == taxiTopic }.first { it[3] == "VendorID" }
140 | currency[4] shouldBe java.sql.Types.OTHER
141 | currency[5] shouldBe "INT"
142 |
143 | val merchantId = columns.filter { it[2] == taxiTopic }.first { it[3] == "tpep_pickup_datetime" }
144 | merchantId[4] shouldBe java.sql.Types.OTHER
145 | merchantId[5] shouldBe "STRING"
146 |
147 | val blocked = columns.filter { it[2] == taxiTopic }.first { it[3] == "trip_distance" }
148 | blocked[4] shouldBe java.sql.Types.OTHER
149 | blocked[5] shouldBe "DOUBLE"
150 | }
151 | "support listing columns using column regex" {
152 | val columns = conn.metaData.getColumns(null, null, null, "VendorID").toList()
153 | val currency = columns.filter { it[2] == taxiTopic }.first { it[3] == "VendorID" }
154 | currency[4] shouldBe java.sql.Types.OTHER
155 | currency[5] shouldBe "INT"
156 | }
157 | "support listing columns using table and column regex" {
158 | val columns = conn.metaData.getColumns(null, null, taxiTopic, "VendorID").toList()
159 | val currency = columns.filter { it[2] == taxiTopic }.first { it[3] == "VendorID" }
160 | currency[4] shouldBe java.sql.Types.OTHER
161 | currency[5] shouldBe "INT"
162 | }
163 | "return versioning information" {
164 | conn.metaData.databaseMajorVersion shouldBe gte(1)
165 | conn.metaData.databaseMinorVersion shouldBe gte(1)
166 | conn.metaData.driverMajorVersion shouldBe 3
167 | conn.metaData.driverMinorVersion shouldBe 0
168 | conn.metaData.databaseProductName shouldBe Constants.ProductName
169 | conn.metaData.jdbcMajorVersion shouldBe 4
170 | conn.metaData.jdbcMinorVersion shouldBe 0
171 | }
172 | "not support batch updates" {
173 | conn.metaData.supportsBatchUpdates() shouldBe false
174 | }
175 | "be read only" {
176 | conn.metaData.isReadOnly shouldBe true
177 | }
178 | }
179 | }
180 |
181 | fun createTaxiTopic(conn: Connection): String {
182 | val topic = newTopicName()
183 | conn.createStatement().executeQuery("""
184 | create table $topic(
185 | _key string,
186 | VendorID int,
187 | tpep_pickup_datetime string,
188 | tpep_dropoff_datetime string,
189 | passenger_count int,
190 | trip_distance double,
191 | pickup_longitude double,
192 | pickup_latitude double,
193 | RateCodeID int,
194 | store_and_fwd_flag string,
195 | dropoff_longitude double,
196 | dropoff_latitude double,
197 | payment_type int,
198 | fare_amount double,
199 | extra double,
200 | mta_tax double,
201 | improvement_surcharge double,
202 | tip_amount double,
203 | tolls_amount double,
204 | total_amount double)
205 | format(string, avro)
206 | """.trimIndent()).toList().shouldHaveSize(1)
207 | return topic
208 | }
209 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/LStatementTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.kotlintest.shouldThrow
4 | import io.kotlintest.specs.ShouldSpec
5 | import java.sql.SQLFeatureNotSupportedException
6 | import java.sql.Statement
7 |
8 | class LStatementTest : ShouldSpec(), ProducerSetup {
9 | init {
10 |
11 | val conn = conn()
12 |
13 | should("throw exception for execute with auto generated columns") {
14 |       shouldThrow<SQLFeatureNotSupportedException> {
15 | conn.createStatement().execute("select * from table", Statement.RETURN_GENERATED_KEYS)
16 | }
17 |       shouldThrow<SQLFeatureNotSupportedException> {
18 | conn.createStatement().execute("select * from table", intArrayOf(1))
19 | }
20 |       shouldThrow<SQLFeatureNotSupportedException> {
21 | conn.createStatement().execute("select * from table", arrayOf("a"))
22 | }
23 | }
24 |
25 | should("throw exception for transaction methods") {
26 |       shouldThrow<SQLFeatureNotSupportedException> {
27 | conn.rollback()
28 | }
29 |       shouldThrow<SQLFeatureNotSupportedException> {
30 | conn.commit()
31 | }
32 |       shouldThrow<SQLFeatureNotSupportedException> {
33 | conn.setSavepoint()
34 | }
35 | }
36 |
37 | should("throw exception for blob methods") {
38 |       shouldThrow<SQLFeatureNotSupportedException> {
39 | conn.createBlob()
40 | }
41 |       shouldThrow<SQLFeatureNotSupportedException> {
42 | conn.createClob()
43 | }
44 |       shouldThrow<SQLFeatureNotSupportedException> {
45 | conn.createNClob()
46 | }
47 | }
48 | }
49 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/LensesDriverTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.kotlintest.shouldBe
4 | import io.kotlintest.shouldThrow
5 | import io.kotlintest.specs.WordSpec
6 | import java.sql.SQLException
7 | import java.util.*
8 |
9 | class LensesDriverTest : WordSpec() {
10 | init {
11 | "LsqlDriver" should {
12 | "return required and optional properties for connections" {
13 | val props = LensesDriver().getPropertyInfo("any", Properties())
14 | props.map { it.name }.toSet() shouldBe setOf("user", "password", "weakssl")
15 | }
16 | "set user and password to required" {
17 | val props = LensesDriver().getPropertyInfo("any", Properties())
18 | props.first { it.name == "user" }.required shouldBe true
19 | props.first { it.name == "password" }.required shouldBe true
20 | }
21 | "set weakssl as optional" {
22 | val props = LensesDriver().getPropertyInfo("any", Properties())
23 | props.first { it.name == "weakssl" }.required shouldBe false
24 | }
25 | "accept valid single host url" {
26 | LensesDriver().acceptsURL("jdbc:lenses:kafka:http://localhost:3030") shouldBe true
27 | }
28 | "accept valid multiple host url" {
29 | LensesDriver().acceptsURL("jdbc:lenses:kafka:http://localhost:3030,http://localhost:3031") shouldBe true
30 | }
31 | "parse parameters from url" {
32 | val (url, props) = LensesDriver().parseUrl("jdbc:lenses:kafka:http://localhost:3030,http://localhost:3031?user=admin&wibble=wobble")
33 | url shouldBe "jdbc:lenses:kafka:http://localhost:3030,http://localhost:3031"
34 | props["user"] shouldBe "admin"
35 | props["wibble"] shouldBe "wobble"
36 | }
37 | "reject invalid url" {
38 | LensesDriver().acceptsURL("jdbc:qqqqq") shouldBe false
39 | }
40 | "throw return null for connect when the url is invalid" {
41 | LensesDriver().connect("jdbc:wibble", Properties()) shouldBe null
42 | }
43 | "throw for connection when the url is null" {
44 |         shouldThrow<SQLException> {
45 | LensesDriver().connect(null, Properties())
46 | }
47 | }
48 | "require each url to be http or https" {
49 |         shouldThrow<SQLException> {
50 | LensesDriver().connect("jdbc:lenses:kafka:httpq://localhost:3030", Properties())
51 | }
52 | }
53 | }
54 | }
55 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/NormalizeRecordTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.kotlintest.shouldBe
4 | import io.kotlintest.specs.FunSpec
5 | import org.apache.avro.SchemaBuilder
6 |
7 | class NormalizeRecordTest : FunSpec() {
8 | init {
9 |
10 | test("normalizing flat schema with all values") {
11 | val schema = SchemaBuilder.builder().record("foo")
12 | .fields()
13 | .optionalString("a")
14 | .requiredBoolean("b")
15 | .optionalInt("c")
16 | .endRecord()
17 | val node = JacksonSupport.mapper.readTree("""{"a":"hello", "b":true, "c": 123}""")
18 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b" to true, "c" to 123)
19 | }
20 |
21 | test("normalizing flat schema with all values unordered") {
22 | val schema = SchemaBuilder.builder().record("foo")
23 | .fields()
24 | .optionalString("a")
25 | .requiredBoolean("b")
26 | .optionalInt("c")
27 | .endRecord()
28 | val node = JacksonSupport.mapper.readTree("""{"a":"hello", "c":123, "b": true}""")
29 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b" to true, "c" to 123)
30 | }
31 |
32 | test("normalizing flat schema with missing values") {
33 | val schema = SchemaBuilder.builder().record("foo")
34 | .fields()
35 | .optionalString("a")
36 | .requiredBoolean("b")
37 | .optionalInt("c")
38 | .endRecord()
39 | val node = JacksonSupport.mapper.readTree("""{"a":"hello", "c":123}""")
40 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b" to null, "c" to 123)
41 | }
42 |
43 | test("normalizing flat schema with missing values unordered") {
44 | val schema = SchemaBuilder.builder().record("foo")
45 | .fields()
46 | .optionalString("a")
47 | .requiredBoolean("b")
48 | .optionalInt("c")
49 | .endRecord()
50 | val node = JacksonSupport.mapper.readTree("""{"c":123, "a":"hello"}""")
51 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b" to null, "c" to 123)
52 | }
53 |
54 | test("normalizing nested schema with all values") {
55 |
56 | val b = SchemaBuilder.builder().record("b")
57 | .fields()
58 | .optionalString("x")
59 | .requiredLong("y")
60 | .endRecord()
61 |
62 | val schema = SchemaBuilder.builder().record("foo")
63 | .fields()
64 | .optionalString("a")
65 | .name("b").type(b).noDefault()
66 | .optionalInt("c")
67 | .endRecord()
68 | val node = JacksonSupport.mapper.readTree("""{"a": "hello", "b": { "x": "world", "y": 999 }, "c": 123}""")
69 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b.x" to "world", "b.y" to 999, "c" to 123)
70 | }
71 |
72 | test("normalizing nested schema with missing values") {
73 |
74 | val b = SchemaBuilder.builder().record("b")
75 | .fields()
76 | .optionalString("x")
77 | .requiredLong("y")
78 | .endRecord()
79 |
80 | val schema = SchemaBuilder.builder().record("foo")
81 | .fields()
82 | .optionalString("a")
83 | .name("b").type(b).noDefault()
84 | .optionalInt("c")
85 | .endRecord()
86 | val node = JacksonSupport.mapper.readTree("""{"a": "hello", "b": { "x": "world" }}""")
87 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b.x" to "world", "b.y" to null, "c" to null)
88 | }
89 |
90 | test("normalizing nested schema with missing parents") {
91 |
92 | val b = SchemaBuilder.builder().record("b")
93 | .fields()
94 | .optionalString("x")
95 | .requiredLong("y")
96 | .endRecord()
97 |
98 | val schema = SchemaBuilder.builder().record("foo")
99 | .fields()
100 | .optionalString("a")
101 | .name("b").type(b).noDefault()
102 | .optionalInt("c")
103 | .endRecord()
104 | val node = JacksonSupport.mapper.readTree("""{"a": "hello"}""")
105 | normalizeRecord(schema, node) shouldBe listOf("a" to "hello", "b.x" to null, "b.y" to null, "c" to null)
106 | }
107 | }
108 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/PrecisionQueryTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import io.kotlintest.shouldBe
4 | //import io.kotlintest.specs.WordSpec
5 | //import io.lenses.jdbc4.data.EquitiesData
6 | //import java.sql.DriverManager
7 | //
8 | //class PrecisionQueryTest : WordSpec(), EquitiesData {
9 | //
10 | // init {
11 | //
12 | // io.lenses.jdbc4.LDriver()
13 | // val topic = try {
14 | // populateEquities()
15 | // } catch (e: Throwable) {
16 | // e.printStackTrace()
17 | // throw e
18 | // }
19 | //
20 | // val q = "SELECT * FROM $topic"
21 | // val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
22 | // val stmt = conn.createStatement()
23 | //
24 | // "JDBC Driver" should {
25 | // "use unlimited precision for strings" {
26 | // val rs = stmt.executeQuery(q)
27 | // rs.metaData.columnCount shouldBe 6
28 | // rs.metaData.getColumnLabel(1) shouldBe "ticker"
29 | // rs.metaData.getColumnLabel(2) shouldBe "name"
30 | // rs.metaData.getColumnLabel(3) shouldBe "price"
31 | // rs.metaData.getColumnLabel(4) shouldBe "float"
32 | // rs.metaData.getColumnLabel(5) shouldBe "sector"
33 | // rs.metaData.getColumnLabel(6) shouldBe "yield"
34 | // rs.next()
35 | // // our strings are unlimited
36 | // rs.metaData.getPrecision(2) shouldBe Int.MAX_VALUE
37 | // }
38 | // "support precision for logical decimals backed by bytes" {
39 | // val rs = stmt.executeQuery(q)
40 | // rs.next()
41 | // rs.metaData.getPrecision(3) shouldBe 10
42 | // }
43 | // "support precision for fixed types" {
44 | // val rs = stmt.executeQuery(q)
45 | // rs.next()
46 | // rs.metaData.getPrecision(1) shouldBe 4
47 | // }
48 | // "return 0 for other numerical types" {
49 | // val rs = stmt.executeQuery(q)
50 | // rs.next()
51 | // rs.metaData.getPrecision(4) shouldBe 0
52 | // rs.metaData.getPrecision(6) shouldBe 0
53 | // }
54 | // }
55 | // }
56 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/PreparedInsertTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import io.kotlintest.shouldBe
4 | //import io.kotlintest.shouldThrow
5 | //import io.kotlintest.specs.WordSpec
6 | //import java.sql.DriverManager
7 | //
8 | //import java.sql.SQLException
9 | //
10 | //class PreparedInsertTest : WordSpec(), MovieData {
11 | //
12 | // init {
13 | //
14 | // io.lenses.jdbc4.LDriver()
15 | // val topic = populateMovies()
16 | //
17 | // val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
18 | //
19 | // "JDBC Driver" should {
20 | // "support prepared statements" {
21 | // val sql = "SET _ktype='STRING';INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
22 | // val stmt = conn.prepareStatement(sql)
23 | // stmt.setString(1, "sammy")
24 | // stmt.setString(2, "4191005000501123")
25 | // stmt.setString(3, "GBP")
26 | // stmt.setString(4, "smith")
27 | // stmt.setString(5, "UK")
28 | // stmt.setBoolean(6, false)
29 | // stmt.execute() shouldBe true
30 | // }
31 | // "!support nested parameters" {
32 | // val sql = "INSERT INTO `$topic`(name, `year`, director, `imdb`.`url`, `imdb`.`ranking`, `imdb`.`rating`) values (?,?,?,?,?,?)"
33 | // val stmt = conn.prepareStatement(sql)
34 | // stmt.setString(1, "Batman Begins")
35 | // stmt.setInt(2, 2005)
36 | // stmt.setString(3, "christopher nolan")
37 | // stmt.setString(4, "https://www.imdb.com/title/tt0372784/")
38 | // stmt.setInt(5, 211)
39 | // stmt.setDouble(6, 8.3)
40 | // stmt.execute() shouldBe true
41 | // }
42 | // "throw an exception if incorrect number of placeholders" {
43 | // val sql = "INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?)"
44 | //        shouldThrow<SQLException> {
45 | // conn.prepareStatement(sql)
46 | // }
47 | // }
48 | // "throw an exception trying to set a parameter out of range" {
49 | // val sql = "INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
50 | // val stmt = conn.prepareStatement(sql)
51 | //        shouldThrow<SQLException> {
52 | // stmt.setString(0, "wibble")
53 | // }
54 | //        shouldThrow<SQLException> {
55 | // stmt.setString(7, "wibble")
56 | // }
57 | // }
58 | // "throw an exception if a parameter is not set" {
59 | // val sql = "INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
60 | // val stmt = conn.prepareStatement(sql)
61 | //        shouldThrow<SQLException> {
62 | // stmt.execute()
63 | // }
64 | // }
65 | // "throw an exception if a nested parameter is not set" {
66 | //
67 | // }
68 | // "return parameter info for prepared statements" {
69 | // val sql = "INSERT INTO cc_data (customerFirstName, number, currency, customerLastName, country, blocked) values (?,?,?,?,?,?)"
70 | // val stmt = conn.prepareStatement(sql)
71 | // val meta = stmt.parameterMetaData
72 | // meta.parameterCount shouldBe 6
73 | // meta.getParameterClassName(1) shouldBe "java.lang.String"
74 | // meta.getParameterClassName(2) shouldBe "java.lang.String"
75 | // meta.getParameterClassName(3) shouldBe "java.lang.String"
76 | // meta.getParameterClassName(4) shouldBe "java.lang.String"
77 | // meta.getParameterClassName(5) shouldBe "java.lang.String"
78 | // meta.getParameterClassName(6) shouldBe "java.lang.Boolean"
79 | // }
80 | // }
81 | // }
82 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/ProducerSetup.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4
2 |
3 | import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient
4 | import io.lenses.jdbc4.resultset.toList
5 | import io.lenses.jdbc4.util.Logging
6 | import org.apache.avro.Schema
7 | import org.apache.avro.generic.GenericData
8 | import org.apache.kafka.clients.admin.AdminClient
9 | import org.apache.kafka.clients.admin.Config
10 | import org.apache.kafka.clients.admin.ConfigEntry
11 | import org.apache.kafka.clients.admin.NewTopic
12 | import org.apache.kafka.clients.producer.KafkaProducer
13 | import org.apache.kafka.common.config.ConfigResource
14 | import org.apache.kafka.common.config.TopicConfig
15 | import java.sql.Connection
16 | import java.sql.DriverManager
17 | import java.util.*
18 | import java.util.concurrent.TimeUnit
19 |
20 | interface ProducerSetup : Logging {
21 |
22 | fun conn(): Connection {
23 | LensesDriver()
24 | return DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
25 | }
26 |
27 | fun schemaClient() = CachedSchemaRegistryClient("http://127.0.0.1:8081", 1000)
28 |
29 | fun registerValueSchema(topic: String, schema: Schema) {
30 | val client = schemaClient()
31 | val valueTopic = "$topic-value"
32 | client.register(valueTopic, schema)
33 | logger.debug("Schema registered at $valueTopic")
34 | }
35 |
36 | fun newTopicName() = "topic_" + UUID.randomUUID().toString().replace('-', '_')
37 |
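   | // Creates the topic through the Kafka AdminClient, applies the requested
   | // cleanup.policy (compact by default) and blocks until both operations
   | // complete before returning the topic name.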
38 | fun createTopic(topicName: String, compactMode: String = TopicConfig.CLEANUP_POLICY_COMPACT): String {
39 | createAdmin().use {
40 |
41 | logger.debug("Creating topic $topicName")
42 | it.createTopics(listOf(NewTopic(topicName, 1, 1))).all().get(10, TimeUnit.SECONDS)
43 |
44 | it.alterConfigs(mapOf(
45 | ConfigResource(ConfigResource.Type.TOPIC, topicName) to Config(
46 | listOf(ConfigEntry(TopicConfig.CLEANUP_POLICY_CONFIG, compactMode))
47 | )
48 | )).all().get()
49 |
50 | logger.debug("Closing admin client")
51 | it.close(10, TimeUnit.SECONDS)
52 | }
53 |
54 | conn().metaData.getTables(null, null, null, null).toList().map { it[2] }.contains(topicName)
55 |
56 | return topicName
57 | }
58 |
59 | fun createTopic(): String = createTopic(newTopicName())
60 |
61 | fun adminProps() = Properties().apply {
62 | this["bootstrap.servers"] = "PLAINTEXT://127.0.0.1:9092"
63 | }
64 |
65 | fun createAdmin(): AdminClient = AdminClient.create(adminProps())
66 |
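   | // Producer settings for the local test broker: Avro serialization for both
   | // key and value, backed by the schema registry on port 8081.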
67 | fun producerProps() = Properties().apply {
68 | this["bootstrap.servers"] = "PLAINTEXT://127.0.0.1:9092"
69 | this["acks"] = "all"
70 | this["retries"] = 0
71 | this["batch.size"] = 16384
72 | this["linger.ms"] = 1
73 | this["buffer.memory"] = 33554432
74 | this["key.serializer"] = io.confluent.kafka.serializers.KafkaAvroSerializer::class.java.canonicalName
75 | this["value.serializer"] = io.confluent.kafka.serializers.KafkaAvroSerializer::class.java.canonicalName
76 | this["schema.registry.url"] = "http://127.0.0.1:8081"
77 | }
78 |
79 |   fun createProducer() = KafkaProducer<String, GenericData.Record>(producerProps())
80 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/RestClientTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import fi.iki.elonen.NanoHTTPD
4 | //import io.kotlintest.Description
5 | //import io.kotlintest.Spec
6 | //import io.kotlintest.shouldBe
7 | //import io.kotlintest.shouldThrow
8 | //import io.kotlintest.specs.WordSpec
9 | //import io.lenses.jdbc4.client.RestClient
10 | //import io.lenses.jdbc4.client.Credentials
11 | //import javax.net.ssl.SSLHandshakeException
12 | //
13 | //class RestClientTest : WordSpec() {
14 | //
15 | // class LoginServer : NanoHTTPD(61864) {
16 | // override fun serve(session: IHTTPSession): Response {
17 | // return newFixedLengthResponse("""wibble""".trimIndent())
18 | // }
19 | // }
20 | //
21 | // private val server = LoginServer()
22 | //
23 | // override fun beforeSpec(description: Description, spec: Spec) {
24 | // server.makeSecure(NanoHTTPD.makeSSLSocketFactory("/keystore.jks", "password".toCharArray()), null)
25 | // server.start(NanoHTTPD.SOCKET_READ_TIMEOUT, false)
26 | // }
27 | //
28 | // override fun afterSpec(description: Description, spec: Spec) {
29 | // server.stop()
30 | // }
31 | //
32 | // init {
33 | // "RestClient" should {
34 | // "support self signed certificates if weak ssl is set to true" {
35 | // val client = RestClient(listOf("https://localhost:61864"), Credentials("any", "any"), true)
36 | // client.token shouldBe "wibble"
37 | // }
38 | // "reject self signed certificates if weak ssl is set to false" {
39 | //        shouldThrow<SSLHandshakeException> {
40 | // RestClient(listOf("https://localhost:61864"), Credentials("any", "any"), false)
41 | // }
42 | // }
43 | // }
44 | // }
45 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/ScaleQueryTest.kt:
--------------------------------------------------------------------------------
1 | //package io.lenses.jdbc4
2 | //
3 | //import io.kotlintest.shouldBe
4 | //import io.kotlintest.specs.WordSpec
5 | //import org.apache.avro.LogicalTypes
6 | //import org.apache.avro.SchemaBuilder
7 | //import org.apache.avro.generic.GenericData
8 | //import org.apache.kafka.clients.producer.KafkaProducer
9 | //import org.apache.kafka.clients.producer.ProducerRecord
10 | //import java.math.BigDecimal
11 | //import java.nio.ByteBuffer
12 | //import java.sql.DriverManager
13 | //import java.util.*
14 | //
15 | //class ScaleQueryTest : WordSpec(), ProducerSetup {
16 | //
17 | // val topic = "topic_" + UUID.randomUUID().toString().replace('-', '_')
18 | //
19 | // fun populateEquities() {
20 | //
21 | // val amount = SchemaBuilder.builder().bytesType()
22 | // LogicalTypes.decimal(4, 3).addToSchema(amount)
23 | //
24 | // val schema = SchemaBuilder.record("equity").fields()
25 | // .name("a").type(amount).noDefault()
26 | // .endRecord()
27 | //
28 | //    val producer = KafkaProducer<String, GenericData.Record>(producerProps())
29 | // val record = GenericData.Record(schema)
30 | // record.put("a", ByteBuffer.wrap(BigDecimal(12.34).unscaledValue().toByteArray()))
31 | // producer.send(ProducerRecord(topic, "key1", record))
32 | // }
33 | //
34 | // init {
35 | //
36 | // io.lenses.jdbc4.LDriver()
37 | // populateEquities()
38 | //
39 | // val q = "SELECT * FROM $topic WHERE _ktype=STRING AND _vtype=AVRO"
40 | // val conn = DriverManager.getConnection("jdbc:lenses:kafka:http://localhost:3030", "admin", "admin")
41 | // val stmt = conn.createStatement()
42 | //
43 | // "JDBC Driver" should {
44 | // "support scale for logical decimals backed by bytes" {
45 | // val rs = stmt.executeQuery(q)
46 | // rs.next()
47 | // rs.metaData.getScale(1) shouldBe 3
48 | // }
49 | // }
50 | // }
51 | //}
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/data/CCData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.data
2 |
3 | import java.util.*
4 |
5 | interface CCData {
6 |
7 |   val countries: List<String>
8 | get() = listOf("UK", "US", "DE", "ES", "FR")
9 |
10 |   val currencies: List<String>
11 | get() = listOf("GBP", "USD", "AUD", "YEN", "EUR")
12 |
13 |   val surnames: List<String>
14 | get() = listOf("picard", "riker", "troi", "crusher", "yar", "la forge", "son of mogh", "obrien", "soong")
15 |   val firstnames: List<String>
16 | get() = listOf("jean luc", "william", "deanna", "beverley", "tasha", "geordi", "worf", "wesley", "miles", "data")
17 |
18 | data class CardData(val country: String,
19 | val currency: String,
20 | val surname: String,
21 | val firstname: String,
22 | val blocked: Boolean,
23 | val number: String)
24 |
25 |   fun <T> randomElement(list: List<T>): T = list[Random().nextInt(list.size)]
26 |
27 | fun randomCountry() = randomElement(countries)
28 | fun randomCurrency() = randomElement(currencies)
29 | fun randomFirstName() = randomElement(firstnames)
30 | fun randomSurname() = randomElement(surnames)
31 | fun randomCardNumber() = IntArray(16, { _ -> Random().nextInt(9) }).joinToString("")
32 |
33 | fun generateCC() = CardData(randomCountry(),
34 | randomCurrency(),
35 | randomSurname(),
36 | randomFirstName(),
37 | Random().nextBoolean(),
38 | randomCardNumber())
39 |
40 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/data/EquitiesData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.data
2 |
3 | import io.lenses.jdbc4.ProducerSetup
4 | import io.lenses.jdbc4.util.Logging
5 | import org.apache.avro.LogicalTypes
6 | import org.apache.avro.SchemaBuilder
7 | import org.apache.avro.generic.GenericData
8 | import org.apache.kafka.clients.producer.KafkaProducer
9 | import org.apache.kafka.clients.producer.ProducerRecord
10 | import java.math.BigDecimal
11 | import java.nio.ByteBuffer
12 |
13 | interface EquitiesData : ProducerSetup, Logging {
14 |
15 | data class Equity(val ticker: String,
16 | val name: String,
17 | val price: BigDecimal,
18 | val float: Int,
19 | val sector: String,
20 | val yield: Double?)
21 |
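   | // Publishes three equity records to a fresh topic; the Avro schema exercises
   | // a fixed-size ticker, a bytes-backed decimal price and an optional yield.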
22 | fun populateEquities(): String {
23 |
24 | val topic = createTopic()
25 | val equities = listOf(
26 | Equity("goog", "Alphabet", BigDecimal(99.11), 12455235, "Tech", 2.3),
27 | Equity("bpop",
28 | "Banco Popular",
29 | BigDecimal(15.34),
30 | 5634643,
31 | "Financials",
32 | 4.4),
33 | Equity("aapl", "Apple", BigDecimal(13.03), 82346, "Tech", null)
34 | )
35 |
36 | val foo = SchemaBuilder.fixed("foo").size(8)
37 | val amount = SchemaBuilder.builder().bytesType()
38 | val ticker = SchemaBuilder.fixed("ticker").size(4)
39 | LogicalTypes.decimal(10, 4).addToSchema(amount)
40 |
41 | val schema = SchemaBuilder.record("equity").fields()
42 | .name("ticker").type(ticker).noDefault()
43 | .requiredString("name")
44 | .name("price").type(amount).noDefault()
45 | .requiredInt("float")
46 | .requiredString("sector")
47 | .optionalDouble("yield")
48 | .endRecord()
49 |
50 |     val producer = KafkaProducer<String, GenericData.Record>(producerProps())
51 | for (equity in equities) {
52 | logger.debug("Populating with $equity")
53 |
54 | val fixed = GenericData.Fixed(foo)
55 | fixed.bytes(byteArrayOf(1, 2, 3, 4, 5))
56 |
57 | val record = GenericData.Record(schema)
58 | record.put("ticker", GenericData.Fixed(ticker, equity.ticker.toByteArray()))
59 | record.put("name", equity.name)
60 | record.put("price", ByteBuffer.wrap(equity.price.unscaledValue().toByteArray()))
61 | record.put("float", equity.float)
62 | record.put("sector", equity.sector)
63 | record.put("yield", equity.yield)
64 | producer.send(ProducerRecord(topic, record))
65 | }
66 | logger.debug("Population of equities completed")
67 |
68 | return topic
69 | }
70 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/data/LocationData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.data
2 |
3 | import io.lenses.jdbc4.ProducerSetup
4 | import io.lenses.jdbc4.util.Logging
5 | import org.apache.avro.Schema
6 | import org.apache.avro.SchemaBuilder
7 | import org.apache.avro.generic.GenericData
8 | import org.apache.kafka.clients.producer.ProducerRecord
9 | import java.util.*
10 |
11 | interface LocationData : ProducerSetup, Logging {
12 |
13 | val random: Random
14 |
15 | data class Address(val street: String, val number: Int, val zip: Int, val state: String)
16 | data class Geo(val lat: Double, val lon: Double)
17 | data class Location(val id: Long, val address: Address, val geo: Geo)
18 |
19 |   val states: List<String>
20 | get() = listOf("Texas", "Utah", "Lousiana", "Hawaii", "New York", "California", "Oregon", "Iowa", "Montana", "Florida", "Georgia", "Maryland", "Oklahoma", "Washington", "Vermont", "Ohio", "Idaho", "Nebraska")
21 |
22 |   val streets: List<String>
23 | get() = listOf("Baker", "Picadilly", "Northern", "Royal", "Oak", "Finchley", "St Johns", "St Pauls", "St Peters", "St Marks", "Fleet", "Hampshire", "Marylebone", "Farringdon")
24 |
25 |   val endings: List<String>
26 | get() = listOf("Road", "Street", "Avenue", "Close", "Drive", "Highway", "Walk", "West", "East")
27 |
28 |   fun <T> randomElement(list: List<T>): T = list[random.nextInt(list.size)]
29 |
30 | fun randomGeo(): Geo = Geo(random.nextDouble(),
31 | random.nextDouble())
32 | fun randomZipCode(): Int = random.nextInt(89999) + 10000
33 | fun randomStreet(): String = randomElement(streets) + " " + randomElement(endings)
34 |
35 | fun randomState() = randomElement(states)
36 | fun randomAddress(): Address = Address(
37 | randomStreet(),
38 | random.nextInt(9999),
39 | randomZipCode(),
40 | randomState())
41 | fun randomLocation() = Location(random.nextLong(), randomAddress(), randomGeo())
42 |
43 | fun addressSchema(): Schema = SchemaBuilder.record("address").fields()
44 | .requiredString("street")
45 | .requiredInt("number")
46 | .requiredInt("zip")
47 | .requiredString("state")
48 | .endRecord()
49 |
50 | fun geoSchema(): Schema = SchemaBuilder.record("geo").fields()
51 | .requiredDouble("lat")
52 | .requiredDouble("lon")
53 | .endRecord()
54 |
55 | fun schema(): Schema = SchemaBuilder.record("location").fields()
56 | .requiredLong("id")
57 | .name("address").type(addressSchema()).noDefault()
58 | .name("geo").type(geoSchema()).noDefault()
59 | .endRecord()
60 |
61 | fun record(location: Location): GenericData.Record {
62 | val address = GenericData.Record(addressSchema())
63 | address.put("street", location.address.street)
64 | address.put("number", location.address.number)
65 | address.put("zip", location.address.zip)
66 | address.put("state", location.address.state)
67 |
68 | val geo = GenericData.Record(geoSchema())
69 | geo.put("lat", location.geo.lat)
70 | geo.put("lon", location.geo.lon)
71 |
72 | val record = GenericData.Record(schema())
73 | record.put("id", random.nextLong())
74 | record.put("address", address)
75 | record.put("geo", geo)
76 |
77 | return record
78 | }
79 |
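   | // Creates a topic, registers the location value schema, then publishes five
   | // random locations keyed by their generated id.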
80 | fun populateLocations(): String {
81 | val topic = createTopic()
82 | val locations = List(5, { _ -> randomLocation() })
83 | registerValueSchema(topic, schema())
84 | val producer = super.createProducer()
85 |
86 | for (location in locations) {
87 | val record = record(location)
88 | logger.info("Populating location $record")
89 | producer.send(ProducerRecord(topic, record.get("id").toString(), record))
90 | }
91 |
92 | logger.debug("Closing producer")
93 | producer.close()
94 |
95 | return topic
96 | }
97 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/data/MovieData.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.data
2 |
3 | import io.kotlintest.matchers.collections.shouldHaveSize
4 | import io.lenses.jdbc4.ProducerSetup
5 | import io.lenses.jdbc4.resultset.toList
6 | import io.lenses.jdbc4.util.Logging
7 | import org.apache.kafka.common.config.TopicConfig
8 | import java.sql.Connection
9 |
10 | interface MovieData : ProducerSetup, Logging {
11 |
12 | fun populateMovies(conn: Connection, compacted: Boolean = true): String {
13 |
14 | val topic = createTopic(newTopicName(), if (compacted) TopicConfig.CLEANUP_POLICY_COMPACT else TopicConfig.CLEANUP_POLICY_DELETE)
15 | conn.createStatement().executeQuery("""
16 | create TABLE if not EXISTS
17 | $topic(
18 | _key string,
19 | name string,year int,
20 | director string,
21 | imdb.url string,
22 | imdb.ranking int,
23 | imdb.rating double)
24 | format (string, avro)
25 | properties(compacted=$compacted);""".trimIndent()
26 | ).toList().shouldHaveSize(1)
27 |
28 | conn.createStatement().executeUpdate("""
29 | insert into $topic(_key, name, year, director, imdb.url, imdb.ranking, imdb.rating)
30 | VALUES
31 | ("Shawshank Redemption","Shawshank Redemption", 1998, "Frank Darabont", "http://www.imdb.com/title/tt0111161", 144, 9.2),
32 | ("The Good, The Bad and the Ugly","The Good, The Bad and the Ugly", 1968, "Sergio Leone","", 1, 8.8),
33 | ("Interstellar","Interstellar", 2017, "Chris Nolan", "http://www.imdb.com/title/tt0816692", 30, 8.5)
34 | """.trimIndent())
35 |
36 | return topic
37 | }
38 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/data/Samples.java:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.data;
2 |
3 | import java.sql.Connection;
4 | import java.sql.DatabaseMetaData;
5 | import java.sql.DriverManager;
6 | import java.sql.PreparedStatement;
7 | import java.sql.ResultSet;
8 | import java.sql.ResultSetMetaData;
9 | import java.sql.SQLException;
10 | import java.sql.Statement;
11 | import java.util.Arrays;
12 | import java.util.HashMap;
13 | import java.util.List;
14 | import java.util.Map;
15 | import java.util.Properties;
16 |
17 | public class Samples {
18 |
19 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
20 |
21 | public Samples() throws SQLException {
22 | }
23 |
24 | public void preparedInsert() throws SQLException {
25 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
26 | PreparedStatement stmt = conn.prepareStatement("INSERT INTO mytopic (name, city, lat, long) VALUES (?, ?, ?, ?)");
27 | stmt.setString(1, "Tyrian Lannister");
28 | stmt.setString(2, "Kings Landing");
29 | stmt.setDouble(3, 67.5);
30 | stmt.setDouble(4, -41.2);
31 | stmt.execute();
32 | }
33 |
34 | public void loopedPreparedInsert() throws SQLException {
35 |
36 |     List<String> characters = Arrays.asList("Tyrian Lannister", "Cersei Lannister", "Tywin Lannister");
37 |
38 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
39 | PreparedStatement stmt = conn.prepareStatement("INSERT INTO mytopic (name, city, lat, long) VALUES (?, ?, ?, ?)");
40 | stmt.setString(2, "Kings Landing");
41 | stmt.setDouble(3, 67.5);
42 | stmt.setDouble(4, -41.2);
43 |
44 | for (String character : characters) {
45 | stmt.setString(1, character);
46 | stmt.execute();
47 | }
48 |
49 | stmt.close();
50 | }
51 |
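    | // Same as the looped insert above, but rows are accumulated with addBatch()
    | // and sent in a single executeBatch() call instead of one execute() per row.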
52 | public void batchedLoopedPreparedInsert() throws SQLException {
53 |
54 |     List<String> characters = Arrays.asList("Tyrian Lannister", "Cersei Lannister", "Tywin Lannister");
55 |
56 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
57 | PreparedStatement stmt = conn.prepareStatement("INSERT INTO mytopic (name, city, lat, long) VALUES (?, ?, ?, ?)");
58 | stmt.setString(2, "Kings Landing");
59 | stmt.setDouble(3, 67.5);
60 | stmt.setDouble(4, -41.2);
61 |
62 | for (String character : characters) {
63 | stmt.setString(1, character);
64 | stmt.addBatch();
65 | }
66 |
67 | stmt.executeBatch();
68 | stmt.close();
69 | }
70 |
71 | public void metadata() throws SQLException {
72 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
73 | Statement stmt = conn.createStatement();
74 | ResultSet rs = stmt.executeQuery("SELECT * FROM mytopic");
75 | ResultSetMetaData meta = rs.getMetaData();
76 | for (int k = 1; k <= meta.getColumnCount(); k++) {
77 | System.out.println("ColumnName=" + meta.getColumnName(k));
78 | System.out.println("ColumnType=" + meta.getColumnTypeName(k));
79 | System.out.println("Nullability=" + meta.isNullable(k));
80 | System.out.println("Signed=" + meta.isSigned(k));
81 | System.out.println("Precision=" + meta.getPrecision(k));
82 | System.out.println("Scale=" + meta.getScale(k));
83 | }
84 | }
85 |
86 | public void resultSetWhile() throws SQLException {
87 |
88 | Connection conn = DriverManager.getConnection(
89 | "jdbc:lenses:kafka:http://localhost:3030",
90 | "username",
91 | "pasword");
92 |
93 | Statement stmt = conn.createStatement();
94 |
95 | ResultSet rs = stmt.executeQuery("SELECT * FROM mytopic WHERE _ktype='STRING' AND _vtype='AVRO'");
96 |
97 | while (rs.next()) {
98 | System.out.println(rs.getString("name"));
99 | System.out.println(rs.getInt("age"));
100 | System.out.println(rs.getString("location"));
101 | }
102 | }
103 |
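    | // Demonstrates cursor navigation: last() and first() reposition the cursor
    | // before the rows are iterated again with next().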
104 | public void resultSetOffset() throws SQLException {
105 | Connection conn = DriverManager.getConnection(
106 | "jdbc:lenses:kafka:http://localhost:3030",
107 | "username",
108 | "pasword");
109 |
110 | Statement stmt = conn.createStatement();
111 | ResultSet rs = stmt.executeQuery("SELECT * FROM mytopic WHERE _ktype='STRING' AND _vtype='AVRO'");
112 |
113 | rs.last();
114 | System.out.println(rs.getString("name"));
115 |
116 | rs.first();
117 | System.out.println(rs.getString("name"));
118 |
119 | while (rs.next()) {
120 | System.out.println(rs.getString("name"));
121 | System.out.println(rs.getInt("age"));
122 | System.out.println(rs.getString("location"));
123 | }
124 | }
125 |
126 | public void tableMeta() throws SQLException {
127 | DatabaseMetaData meta = conn.getMetaData();
128 | ResultSet rs = meta.getTables(null, null, "sometable", null);
129 | while (rs.next()) {
130 | System.out.println("Table=" + rs.getString(3));
131 | System.out.println("Type=" + rs.getString(4));
132 | }
133 | }
134 |
135 | public void columnMeta() throws SQLException {
136 | DatabaseMetaData meta = conn.getMetaData();
137 | ResultSet rs = meta.getColumns(null, null, "sometable", "name*");
138 | while (rs.next()) {
139 | System.out.println("Table=" + rs.getString(3));
140 | System.out.println("Column=" + rs.getString(4));
141 | System.out.println("Datatype=" + rs.getString(5));
142 | }
143 | }
144 |
145 | public void weakConnection() throws SQLException {
146 | Properties props = new Properties();
147 | props.setProperty("user", "myuser");
148 | props.setProperty("password", "mypass");
149 | props.setProperty("weakssl", "true");
150 | Connection conn = DriverManager.getConnection(
151 | "jdbc:lenses:kafka:http://localhost:3030",
152 | props);
153 | }
154 |
155 | public void nestedExample() throws SQLException {
156 | Connection conn = DriverManager.getConnection(
157 | "jdbc:lenses:kafka:http://localhost:3030",
158 | "username",
159 | "password");
160 |
161 | Statement stmt = conn.createStatement();
162 |
163 | ResultSet rs = stmt.executeQuery("SELECT name, address.postcode FROM mytopic WHERE _ktype='STRING' AND _vtype='AVRO'");
164 | while (rs.next()) {
165 | System.out.println(rs.getString("name"));
166 | System.out.println(rs.getString("address.postcode"));
167 | }
168 | }
169 |
170 | public void insert() throws SQLException {
171 | Statement stmt = conn.createStatement();
172 | int result = stmt.executeUpdate("INSERT INTO mytopic (name, city, lat, long) VALUES ('James T Kirk', 'Iowa City', 43.3, -54.2)");
173 | stmt.getResultSet();
174 | }
175 |
176 | public void preparedStatementMeta() throws SQLException {
177 |
178 | Map<String, Object> values = new HashMap<>();
179 | values.put("name", "Walter White");
180 | values.put("city", "Albuquerque");
181 | values.put("lat", 51.0);
182 | values.put("long", 12.3);
183 |
184 | Connection conn = DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass");
185 | PreparedStatement stmt = conn.prepareStatement("INSERT INTO mytopic (name, city, lat, long) VALUES (?, ?, ?, ?)");
186 | ResultSetMetaData meta = stmt.getMetaData();
187 | for (int k = 1; k <= meta.getColumnCount(); k++) {
188 | String columnName = meta.getColumnName(k);
189 | Object value = values.get(columnName);
190 | stmt.setObject(k, value);
191 | }
192 | stmt.execute();
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
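The samples above end here. As a quick recap of the connection pattern they rely on, the following is a minimal Kotlin sketch (not part of the repository) that registers the driver, connects with the same placeholder URL, credentials and `weakssl` property used in weakConnection(), and lists tables through standard DatabaseMetaData:

import java.sql.DriverManager
import java.util.Properties

fun main() {
  Class.forName("io.lenses.jdbc4.LensesDriver")

  val props = Properties().apply {
    setProperty("user", "myuser")
    setProperty("password", "mypass")
    setProperty("weakssl", "true") // tolerate self-signed certificates, as in weakConnection()
  }

  DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", props).use { conn ->
    // Standard JDBC metadata call; column 3 is the table name, column 4 the table type.
    val rs = conn.metaData.getTables(null, null, "%", null)
    while (rs.next()) {
      println("table=${rs.getString(3)} type=${rs.getString(4)}")
    }
  }
}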
/src/test/kotlin/io/lenses/jdbc4/queries/CreateTableTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 |
10 | class CreateTableTest : FunSpec(), ProducerSetup {
11 | init {
12 |
13 | LensesDriver()
14 |
15 | val conn = conn()
16 |
17 | test("CREATE TABLE foo") {
18 |
19 | val tableName = "testtable__" + System.currentTimeMillis()
20 |
21 | val stmt1 = conn.createStatement()
22 | val rs = stmt1.executeQuery("CREATE TABLE $tableName (a text, b int, c boolean) FORMAT (json, json)")
23 | rs.metaData.columnCount shouldBe 2
24 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("flag", "info")
25 |
26 | val stmt2 = conn.createStatement()
27 | val rs2 = stmt2.executeQuery("SHOW TABLES").toList()
28 | rs2.shouldContain(listOf(tableName, "USER", "1", "1"))
29 | }
30 | }
31 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/DeleteTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.eventually
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.data.MovieData
8 | import io.lenses.jdbc4.resultset.toList
9 | import org.apache.kafka.common.config.TopicConfig
10 | import java.time.Duration
11 |
12 | class DeleteTest : FunSpec(), MovieData {
13 | init {
14 |
15 | LensesDriver()
16 | val conn = conn()
17 |
18 | test("DELETE from table test") {
19 |
20 | val topic = populateMovies(conn)
21 |
22 | conn.createStatement().executeQuery("SELECT * FROM $topic").toList().shouldHaveSize(3)
23 | conn.createStatement().executeUpdate("DELETE FROM $topic WHERE name = 'Interstellar'")
24 | // takes a few seconds to kick in on kafka
25 | eventually(Duration.ofSeconds(5), AssertionError::class.java) {
26 | val result = conn.createStatement().executeQuery("SELECT * FROM $topic").toList()
27 | // kafka will insert a new record with the key and value == null
28 | result.shouldHaveSize(4)
29 | }
30 | }
31 |
32 | test("DELETE from table using _value") {
33 |
34 | val topic = populateMovies(conn)
35 | conn.createStatement().executeUpdate("DELETE FROM $topic WHERE _value.year = 1968")
36 | // takes a few seconds to kick in on kafka
37 | eventually(Duration.ofSeconds(5), AssertionError::class.java) {
38 | val result = conn.createStatement().executeQuery("SELECT * FROM $topic").toList()
39 | // kafka will insert a new record with the key and value == null
40 | result.shouldHaveSize(4)
41 | }
42 | }
43 | }
44 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/DescribeTableTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 |
10 | class DescribeTableTest : FunSpec(), ProducerSetup {
11 | init {
12 |
13 | LensesDriver()
14 | val conn = conn()
15 |
16 | test("DESCRIBE TABLE with primitive for key/value") {
17 | val topic=newTopicName()
18 | conn.createStatement().executeUpdate("""
19 | CREATE TABLE $topic(_key string, value string) format(string, string)
20 | """.trimIndent())
21 |
22 | val q = "DESCRIBE TABLE $topic"
23 | val stmt = conn.createStatement()
24 | val rs = stmt.executeQuery(q)
25 | rs.metaData.columnCount shouldBe 2
26 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("key", "value")
27 | }
28 |
29 | test("DESCRIBE TABLE for Avro stored value") {
30 | val topic = newTopicName()
31 | conn.createStatement().executeQuery("""
32 | CREATE TABLE $topic (_key int, id int, name string, quantity int, price double)
33 | FORMAT(INT, Avro);
34 | """.trimIndent())
35 | val q = "DESCRIBE TABLE $topic"
36 | val stmt = conn.createStatement()
37 | val rs = stmt.executeQuery(q).toList()
38 | rs.shouldContain(listOf("_value.id", "int"))
39 | rs.shouldContain(listOf("_value.name", "string"))
40 | rs.shouldContain(listOf("_value.quantity", "int"))
41 | rs.shouldContain(listOf("_value.price", "double"))
42 | rs.shouldContain(listOf("Value", "AVRO"))
43 | rs.shouldContain(listOf("Key", "INT"))
44 | }
45 | }
46 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/InsertTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldHaveSize
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 |
10 | class InsertTest : FunSpec(), ProducerSetup {
11 | init {
12 |
13 | LensesDriver()
14 |
15 | val conn = conn()
16 |
17 | test("INSERT into table test") {
18 |
19 | val tableName = "testtable__" + System.currentTimeMillis()
20 |
21 | val stmt1 = conn.createStatement()
22 | val rs = stmt1.executeQuery("CREATE TABLE $tableName (a text, b int, c boolean) FORMAT (json, json)")
23 | rs.metaData.columnCount shouldBe 2
24 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("flag", "info")
25 |
26 | conn.createStatement().executeUpdate("INSERT INTO $tableName (a,b,c) VALUES('hello', 2, true)")
27 | conn.createStatement().executeUpdate("INSERT INTO $tableName (a,b,c) VALUES('world', 5, false)")
28 | stmt1.executeQuery("SELECT * FROM $tableName").toList().shouldHaveSize(2)
29 | }
30 | }
31 | }
--------------------------------------------------------------------------------
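For reference, the create-insert-select round trip exercised by InsertTest above looks like this when driven from a plain JDBC client; the sketch assumes the same placeholder URL and credentials as the other examples:

import java.sql.DriverManager

fun main() {
  Class.forName("io.lenses.jdbc4.LensesDriver")
  DriverManager.getConnection("jdbc:lenses:kafka:https://localhost:3030", "user", "pass").use { conn ->
    val table = "testtable__" + System.currentTimeMillis()

    // Create a JSON-backed table and write two rows, mirroring InsertTest.
    conn.createStatement().executeQuery("CREATE TABLE $table (a text, b int, c boolean) FORMAT (json, json)")
    conn.createStatement().executeUpdate("INSERT INTO $table (a,b,c) VALUES('hello', 2, true)")
    conn.createStatement().executeUpdate("INSERT INTO $table (a,b,c) VALUES('world', 5, false)")

    // Read the rows back.
    val rs = conn.createStatement().executeQuery("SELECT * FROM $table")
    while (rs.next()) {
      println("a=${rs.getString("a")} b=${rs.getInt("b")} c=${rs.getBoolean("c")}")
    }
  }
}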
/src/test/kotlin/io/lenses/jdbc4/queries/PollTopicViaJdbcTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldBeEmpty
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.specs.WordSpec
7 | import io.lenses.jdbc4.LensesDriver
8 | import io.lenses.jdbc4.ProducerSetup
9 | import io.lenses.jdbc4.resultset.toList
10 | import java.sql.DriverManager
11 |
12 | class PollTopicViaJdbcTest : WordSpec(), ProducerSetup {
13 |
14 | init {
15 |
16 | LensesDriver()
17 |
18 | val conn = conn()
19 |
20 |
21 | "JDBC Driver" should {
22 | "read a topic via polling" {
23 | val topic = newTopicName()
24 | conn.createStatement().executeQuery("""
25 | CREATE TABLE $topic(_key string, name string, difficulty int) FORMAT (Avro, Avro);
26 | """.trimIndent())
27 |
28 | var lastOffset = 0
29 | val sql1 = "SELECT * FROM $topic WHERE _meta.partition=0 and _meta.offset >= $lastOffset"
30 | conn.createStatement().executeQuery(sql1).toList().shouldHaveSize(0)
31 |
32 |
33 | conn.createStatement().executeQuery("""
34 | INSERT INTO $topic(_key, name, difficulty)
35 | VALUES("1", "Learn Lenses SQL", 3),
36 | ("2", "Learn Quantum Physics", 10);
37 | """.trimIndent())
38 |
39 | val sql2 = "SELECT *, _meta.offset as _offset, _key as key FROM $topic WHERE _meta.partition=0 and _meta.offset >= $lastOffset"
40 | val rs = conn.createStatement().executeQuery(sql2)
41 | val list = mutableListOf<List<Any>>()
42 | while (rs.next()) {
43 | val key = rs.getString("key")
44 | val name = rs.getString("name")
45 | val difficulty = rs.getInt("difficulty")
46 | val offset = rs.getInt("_offset")
47 | list.add(listOf(key, name, difficulty, offset))
48 | }
49 | list.shouldHaveSize(2)
50 | list shouldBe listOf(
51 | listOf("1", "Learn Lenses SQL", 3, 0),
52 | listOf("2", "Learn Quantum Physics", 10, 1)
53 | )
54 | lastOffset = 1
55 |
56 | val sql3 = "SELECT *, _meta.offset as _offset, _key as key FROM $topic WHERE _meta.partition=0 and _meta.offset > $lastOffset"
57 | conn.createStatement().executeQuery(sql3).toList().shouldBeEmpty()
58 | conn.createStatement().executeQuery("""
59 | INSERT INTO $topic(_key, name, difficulty)
60 | VALUES("3", "Learn French", 5);
61 | """.trimIndent())
62 |
63 | val sql4 = "SELECT *, _meta.offset as _offset, _key as key FROM $topic WHERE _meta.partition=0 and _meta.offset > $lastOffset"
64 | val rs2 = conn.createStatement().executeQuery(sql4)
65 | val oneRecordList = mutableListOf<List<Any>>()
66 | while (rs2.next()) {
67 | val key = rs2.getString("key")
68 | val name = rs2.getString("name")
69 | val difficulty = rs2.getInt("difficulty")
70 | val offset = rs2.getInt("_offset")
71 | oneRecordList.add(listOf(key, name, difficulty, offset))
72 | }
73 | oneRecordList.shouldHaveSize(1)
74 | oneRecordList shouldBe listOf(
75 | listOf(
76 | "3", "Learn French", 5, 2
77 | )
78 | )
79 | lastOffset = 2
80 | val sql5 = "SELECT *, _meta.offset as _offset, _key as key FROM $topic WHERE _meta.partition=0 and _meta.offset > $lastOffset"
81 | conn.createStatement().executeQuery(sql5).toList().shouldBeEmpty()
82 | }
83 | }
84 | }
85 | }
--------------------------------------------------------------------------------
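PollTopicViaJdbcTest above demonstrates incremental consumption: remember the highest offset seen so far and select only newer records via the _meta.offset field. Extracted into a reusable helper, the pattern might look like the sketch below (topic and field names are placeholders taken from the test):

import java.sql.Connection

// Poll a topic incrementally by filtering on the _meta.offset virtual column,
// as in PollTopicViaJdbcTest. Returns the highest offset seen, to be passed
// back in on the next call.
fun pollOnce(conn: Connection, topic: String, lastOffset: Long): Long {
  var newLast = lastOffset
  val sql = "SELECT *, _meta.offset as _offset FROM $topic " +
      "WHERE _meta.partition=0 and _meta.offset > $lastOffset"
  val rs = conn.createStatement().executeQuery(sql)
  while (rs.next()) {
    println("${rs.getString("name")} (difficulty=${rs.getInt("difficulty")})")
    newLast = maxOf(newLast, rs.getLong("_offset"))
  }
  return newLast
}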
/src/test/kotlin/io/lenses/jdbc4/queries/SelectTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.assertSoftly
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.shouldThrow
7 | import io.kotlintest.specs.WordSpec
8 | import io.lenses.jdbc4.LensesDriver
9 | import io.lenses.jdbc4.ProducerSetup
10 | import io.lenses.jdbc4.resultset.toList
11 | import java.sql.Connection
12 | import java.sql.DriverManager
13 | import java.sql.SQLException
14 |
15 | class SelectTest : WordSpec(), ProducerSetup {
16 |
17 | init {
18 |
19 | LensesDriver()
20 |
21 | val conn = conn()
22 |
23 | fun createTopic(conn: Connection): String {
24 | val topic = newTopicName()
25 | conn.createStatement().executeQuery("""
26 | create table $topic(
27 | _key string,
28 | VendorID int,
29 | tpep_pickup_datetime string,
30 | tpep_dropoff_datetime string,
31 | passenger_count int,
32 | trip_distance double,
33 | pickup_longitude double,
34 | pickup_latitude double,
35 | RateCodeID int,
36 | store_and_fwd_flag string,
37 | dropoff_longitude double,
38 | dropoff_latitude double,
39 | payment_type int,
40 | fare_amount double,
41 | extra double,
42 | mta_tax double,
43 | improvement_surcharge double,
44 | tip_amount double,
45 | tolls_amount double,
46 | total_amount double)
47 | format(string, avro)
48 | """.trimIndent()).toList().shouldHaveSize(1)
49 | return topic
50 | }
51 |
52 | "JDBC Driver" should {
53 | "throw error for unknown table" {
54 | shouldThrow<SQLException> {
55 | val q = "SELECT * FROM `qweqweqwe`"
56 | val stmt = conn.createStatement()
57 | stmt.executeQuery(q)
58 | }
59 | }
60 | "support wildcard selection" {
61 |
62 | val topic = createTopic(conn)
63 | createTopicData(conn, topic)
64 | val q = "SELECT * FROM $topic"
65 | val stmt = conn.createStatement()
66 | val rs = stmt.executeQuery(q)
67 | rs.metaData.columnCount shouldBe 19
68 | List(19) { rs.metaData.getColumnLabel(it + 1) }.toSet() shouldBe
69 | setOf("VendorID",
70 | "tpep_pickup_datetime",
71 | "tpep_dropoff_datetime",
72 | "passenger_count",
73 | "trip_distance",
74 | "pickup_longitude",
75 | "pickup_latitude",
76 | "RateCodeID",
77 | "store_and_fwd_flag",
78 | "dropoff_longitude",
79 | "dropoff_latitude",
80 | "payment_type",
81 | "fare_amount",
82 | "extra",
83 | "mta_tax",
84 | "improvement_surcharge",
85 | "tip_amount",
86 | "tolls_amount",
87 | "total_amount")
88 | }
89 | "support projections" {
90 | val topic = createTopic(conn)
91 | createTopicData(conn, topic)
92 | val q = "SELECT trip_distance, payment_type FROM $topic"
93 | val stmt = conn.createStatement()
94 | val rs = stmt.executeQuery(q)
95 | rs.metaData.columnCount shouldBe 2
96 | assertSoftly {
97 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
98 | rs.metaData.getColumnLabel(2) shouldBe "payment_type"
99 | }
100 | }
101 | "support projections with backticks" {
102 | val topic = createTopic(conn)
103 | createTopicData(conn, topic)
104 | val q = "SELECT `trip_distance`, `payment_type` FROM $topic"
105 | val stmt = conn.createStatement()
106 | val rs = stmt.executeQuery(q)
107 | rs.metaData.columnCount shouldBe 2
108 | assertSoftly {
109 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
110 | rs.metaData.getColumnLabel(2) shouldBe "payment_type"
111 | }
112 | }
113 | "support queries with white space" {
114 | val topic = createTopic(conn)
115 | createTopicData(conn, topic)
116 | val q = "SELECT `trip_distance`, `payment_type` FROM $topic"
117 | val stmt = conn.createStatement()
118 | val rs = stmt.executeQuery(q)
119 | rs.metaData.columnCount shouldBe 2
120 | assertSoftly {
121 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
122 | rs.metaData.getColumnLabel(2) shouldBe "payment_type"
123 | }
124 | }
125 | "support queries with new lines" {
126 | val topic = createTopic(conn)
127 | createTopicData(conn, topic)
128 | val q = "SELECT `trip_distance`, \n" +
129 | "`payment_type` \n" +
130 | " FROM $topic"
131 | val stmt = conn.createStatement()
132 | val rs = stmt.executeQuery(q)
133 | rs.metaData.columnCount shouldBe 2
134 | assertSoftly {
135 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
136 | rs.metaData.getColumnLabel(2) shouldBe "payment_type"
137 | }
138 | }
139 | "support limits" {
140 | val topic = createTopic(conn)
141 | createTopicData(conn, topic)
142 | val q = "SELECT trip_distance, payment_type FROM $topic limit 3"
143 | val stmt = conn.createStatement()
144 | val rs = stmt.executeQuery(q)
145 | rs.metaData.columnCount shouldBe 2
146 | assertSoftly {
147 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
148 | rs.metaData.getColumnLabel(2) shouldBe "payment_type"
149 | }
150 | var counter = 0
151 | while (rs.next()) {
152 | counter += 1
153 | }
154 | counter shouldBe 3
155 | }
156 | "support where" {
157 | val topic = createTopic(conn)
158 | createTopicData(conn, topic)
159 | val q = "SELECT trip_distance FROM $topic where trip_distance > 2"
160 | val stmt = conn.createStatement()
161 | val rs = stmt.executeQuery(q)
162 | rs.metaData.columnCount shouldBe 1
163 | assertSoftly {
164 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
165 | }
166 | }
167 | "support where with backticks" {
168 | val topic = createTopic(conn)
169 | createTopicData(conn, topic)
170 | val q = "SELECT `trip_distance` FROM `$topic` where `trip_distance` > 2"
171 | val stmt = conn.createStatement()
172 | val rs = stmt.executeQuery(q)
173 | rs.metaData.columnCount shouldBe 1
174 | assertSoftly {
175 | rs.metaData.getColumnLabel(1) shouldBe "trip_distance"
176 | }
177 | }
178 | "return true for valid query" {
179 | val topic = createTopic(conn)
180 | createTopicData(conn, topic)
181 | conn.createStatement().execute("SELECT trip_distance, payment_type FROM `$topic` limit 43") shouldBe true
182 | }
183 | }
184 | }
185 |
186 | private fun createTopicData(conn: Connection, topic: String) {
187 | conn.createStatement()
188 | .executeQuery("""
189 | insert into $topic(
190 | VendorID,
191 | tpep_pickup_datetime,
192 | tpep_dropoff_datetime,
193 | passenger_count,
194 | trip_distance,
195 | pickup_longitude,
196 | pickup_latitude,
197 | RateCodeID,
198 | store_and_fwd_flag,
199 | dropoff_longitude,
200 | dropoff_latitude,
201 | payment_type,
202 | fare_amount,
203 | extra,
204 | mta_tax,
205 | improvement_surcharge,
206 | tip_amount,
207 | tolls_amount,
208 | total_amount)
209 | VALUES
210 | (1,'2016-03-20 19:20:09', '2016-03-20 19:23:25', 1, 0.7 , -73.99173736572266, 40.7386589050293, 1, 'N', -73.99967956542969, 40.73412322998047, 1, 4.5, 0, 0.5, 0.3, 1, 0, 6.3),
211 | (1,'2016-03-20 19:20:10', '2016-03-20 19:25:52', 2, 1.4 , -73.99103546142578, 40.76046371459961, 1, 'N', -73.98226928710938, 40.77155303955078, 2, 5.5, 0, 0.5, 0.3, 0, 0, 6.3),
212 | (2,'2016-03-20 19:20:11', '2016-03-20 19:27:09', 1, 0.66, -73.98735046386719, 40.75653076171875, 1, 'N', -73.9967269897461, 40.76028060913086, 1, 32.5, 0, 0.5, 0.3, 5, 5.54, 43.84),
213 | (2,'2016-03-20 19:20:13', '2016-03-20 19:26:47', 1, 1.61, -73.96199035644531, 40.77945327758789, 1, 'N', -73.98197937011719, 40.77967071533203, 1, 4.5, 0, 0.5, 0.3, 2, 0, 7.3),
214 | (1,'2016-03-20 19:20:15', '2016-03-20 19:25:46', 1, 1.9 ,-74.00791931152344, 40.74011993408203, 1, 'N', -74.0147933959961, 40.715457916259766, 1, 15.5, 0, 0.5, 0.3, 2.5, 0, 18.8),
215 | (1,'2016-03-20 19:20:17', '2016-03-20 19:25:14', 1, 1.3 ,-73.95572662353516, 40.78500747680664, 1, 'N', -73.96957397460938, 40.79618453979492, 1, 4.5, 0, 0.5, 0.3, 1.05, 0, 6.35),
216 | (2,'2016-03-20 19:20:18', '2016-03-20 19:23:29', 1, 0.55, -73.99325561523438, 40.72775650024414, 1, 'N', -73.987060546875, 40.729312896728516, 1, 14.5, 0, 0.5, 0.3, 3.8, 0, 19.1),
217 | (2,'2016-03-20 19:20:20', '2016-03-20 19:43:50', 1, 4.17, -74.0016860961914, 40.73445129394531, 1, 'N', -73.96067810058594, 40.77259826660156, 2, 16.5, 0, 0.5, 0.3, 0, 0, 17.3),
218 | (1,'2016-03-20 19:20:23', '2016-03-20 19:23:25', 1, 0.6 ,-73.94763946533203, 40.77100372314453, 1, 'N', -73.95655059814453, 40.775543212890625, 1, 8.5, 0, 0.5, 0.3, 2.79, 0, 12.09),
219 | (1,'2016-03-20 19:20:25', '2016-03-20 19:31:51', 1, 2.9 ,-73.98570251464844, 40.74380111694336, 1, 'N', -73.96024322509766, 40.780582427978516, 2, 15.5, 0, 0.5, 0.3, 0, 0, 16.3)
220 | """.trimIndent()).toList().shouldHaveSize(1)
221 | }
222 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/ShowFunctionsTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 |
10 | class ShowFunctionsTest : FunSpec(), ProducerSetup {
11 | init {
12 |
13 | LensesDriver()
14 |
15 | val conn = conn()
16 |
17 | test("SHOW FUNCTIONS schema") {
18 | val stmt = conn.createStatement()
19 | val rs = stmt.executeQuery("SHOW FUNCTIONS")
20 | rs.metaData.columnCount shouldBe 3
21 | List(3) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("name", "description", "return_type")
22 | }
23 |
24 | test("SHOW FUNCTIONS data") {
25 | val stmt = conn.createStatement()
26 | val rs = stmt.executeQuery("SHOW FUNCTIONS").toList()
27 | rs.shouldContain(listOf("exists", "Returns true if the given field is present in the payload or false otherwise.", "boolean"))
28 | }
29 | }
30 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/ShowTablesTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 | import java.sql.DriverManager
10 |
11 | class ShowTablesTest : FunSpec(), ProducerSetup {
12 | init {
13 |
14 | LensesDriver()
15 |
16 | val conn = conn()
17 |
18 | test("SHOW TABLES schema") {
19 | val q = "SHOW TABLES"
20 | val stmt = conn.createStatement()
21 | val rs = stmt.executeQuery(q)
22 | rs.metaData.columnCount shouldBe 4
23 | List(4) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("name", "type", "partitions", "replicas")
24 | }
25 |
26 | test("SHOW TABLES data") {
27 | val topic1 = newTopicName()
28 | val topic2 = newTopicName()
29 |
30 | conn.createStatement().executeQuery("""
31 | CREATE TABLE $topic1 (_key int, id int, name string, quantity int, price double) FORMAT(INT, Avro) properties(partitions=3);
32 | CREATE TABLE $topic2 (_key int, id int, name string, quantity int, price double) FORMAT(INT, Json) properties(partitions=4);
33 | """.trimIndent())
34 | val q = "SHOW TABLES"
35 | val stmt = conn.createStatement()
36 | val rs = stmt.executeQuery(q).toList()
37 | rs.shouldContain(listOf(topic1, "USER", "3", "1"))
38 | rs.shouldContain(listOf(topic2, "USER", "4", "1"))
39 | }
40 | }
41 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/ShowVirtualTablesTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.matchers.collections.shouldContain
4 | import io.kotlintest.shouldBe
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.LensesDriver
7 | import io.lenses.jdbc4.ProducerSetup
8 | import io.lenses.jdbc4.resultset.toList
9 |
10 | class ShowVirtualTablesTest : FunSpec(), ProducerSetup {
11 | init {
12 |
13 | LensesDriver()
14 |
15 | val conn = conn()
16 |
17 | test("SHOW VIRTUAL TABLES schema") {
18 | val q = "SHOW VIRTUAL TABLES"
19 | val stmt = conn.createStatement()
20 | val rs = stmt.executeQuery(q)
21 | rs.metaData.columnCount shouldBe 2
22 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("name", "description")
23 | }
24 |
25 | test("SHOW VIRTUAL TABLES data") {
26 | val q = "SHOW VIRTUAL TABLES"
27 | val stmt = conn.createStatement()
28 | val rs = stmt.executeQuery(q).toList()
29 | println(rs)
30 | rs.shouldContain(listOf("__dual", "A virtual table with a single row and field"))
31 | rs.shouldContain(listOf("__fields", "Lists all fields known to this connection"))
32 | }
33 | }
34 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/SingleFieldSchemaQueryTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.shouldBe
4 | import io.kotlintest.specs.WordSpec
5 | import io.lenses.jdbc4.LensesDriver
6 | import io.lenses.jdbc4.ProducerSetup
7 | import org.apache.avro.SchemaBuilder
8 | import org.apache.avro.generic.GenericData
9 | import org.apache.kafka.clients.producer.KafkaProducer
10 | import org.apache.kafka.clients.producer.ProducerRecord
11 | import java.sql.Connection
12 | import java.util.*
13 |
14 | data class Country(val name: String)
15 |
16 | class SingleFieldSchemaQueryTest : WordSpec(), ProducerSetup {
17 |
18 | private val topic = "topic_" + UUID.randomUUID().toString().replace('-', '_')
19 |
20 | private fun populateCountries() {
21 | val countries = listOf(
22 | Country("Vanuatu"),
23 | Country("Comoros")
24 | )
25 | val schema = SchemaBuilder.record("country").fields().requiredString("name").endRecord()
26 | val producer = KafkaProducer<String, GenericData.Record>(producerProps())
27 | for (country in countries) {
28 | val record = GenericData.Record(schema)
29 | record.put("name", country.name)
30 | producer.send(ProducerRecord(topic, country.name, record))
31 | }
32 | }
33 |
34 | fun createData(connection: Connection, topic: String) {
35 | val sqlCreate = """
36 | CREATE TABLE $topic(_key string, name string) format(string, avro);
37 | """.trimIndent()
38 | connection.createStatement().executeQuery(sqlCreate)
39 |
40 | val sqlData = """
41 | INSERT INTO $topic(_key, name) VALUES('Vanuatu','Vanuatu'), ('Comoros','Comoros');
42 | """.trimIndent()
43 | connection.createStatement().executeQuery(sqlData)
44 | }
45 |
46 |
47 | init {
48 |
49 | LensesDriver()
50 | val connection = conn()
51 | createData(connection, topic)
52 |
53 | "JDBC Driver" should {
54 | "support wildcard for fixed schemas" {
55 | val q = "SELECT * FROM $topic"
56 | val stmt = connection.createStatement()
57 | val rs = stmt.executeQuery(q)
58 | rs.metaData.columnCount shouldBe 1
59 | rs.metaData.getColumnLabel(1) shouldBe "name"
60 | }
61 | "support projection for fixed schemas" {
62 | val q = "SELECT name FROM $topic"
63 | val stmt = connection.createStatement()
64 | val rs = stmt.executeQuery(q)
65 | rs.metaData.columnCount shouldBe 1
66 | rs.metaData.getColumnLabel(1) shouldBe "name"
67 | }
68 | }
69 | }
70 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/SynonymTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import arrow.core.Try
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.shouldThrow
7 | import io.kotlintest.specs.FunSpec
8 | import io.lenses.jdbc4.LensesDriver
9 | import io.lenses.jdbc4.ProducerSetup
10 | import io.lenses.jdbc4.resultset.toList
11 | import java.sql.Connection
12 | import java.sql.SQLException
13 |
14 | class SynonymTest : FunSpec(), ProducerSetup {
15 | init {
16 |
17 | LensesDriver()
18 |
19 | val conn = conn()
20 |
21 | test("Synonym test") {
22 | val topic = createTopic(conn)
23 | createTopicData(conn, topic)
24 | val synonymName = "mysynonim" + System.currentTimeMillis()
25 | Try { conn.createStatement().executeUpdate("DROP Synonym $synonymName") }
26 | conn.createStatement().executeUpdate("CREATE Synonym $synonymName FOR $topic")
27 |
28 | val rs = conn.createStatement().executeQuery("SELECT tpep_pickup_datetime, VendorID FROM $synonymName limit 10")
29 | rs.metaData.columnCount shouldBe 2
30 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("tpep_pickup_datetime", "VendorID")
31 | rs.toList().shouldHaveSize(10)
32 |
33 | conn.createStatement().executeUpdate("DROP Synonym $synonymName")
34 | shouldThrow<SQLException> {
35 | conn.createStatement().executeQuery("SELECT * FROM $synonymName")
36 | }
37 | }
38 | }
39 |
40 | fun createTopic(conn: Connection): String {
41 | val topic = newTopicName()
42 | conn.createStatement().executeQuery("""
43 | create table $topic(
44 | _key string,
45 | VendorID int,
46 | tpep_pickup_datetime string,
47 | tpep_dropoff_datetime string,
48 | passenger_count int,
49 | trip_distance double,
50 | pickup_longitude double,
51 | pickup_latitude double,
52 | RateCodeID int,
53 | store_and_fwd_flag string,
54 | dropoff_longitude double,
55 | dropoff_latitude double,
56 | payment_type int,
57 | fare_amount double,
58 | extra double,
59 | mta_tax double,
60 | improvement_surcharge double,
61 | tip_amount double,
62 | tolls_amount double,
63 | total_amount double)
64 | format(string, avro)
65 | """.trimIndent()).toList().shouldHaveSize(1)
66 | return topic
67 | }
68 |
69 | private fun createTopicData(conn: Connection, topic: String) {
70 | conn.createStatement()
71 | .executeQuery("""
72 | insert into $topic(
73 | VendorID,
74 | tpep_pickup_datetime,
75 | tpep_dropoff_datetime,
76 | passenger_count,
77 | trip_distance,
78 | pickup_longitude,
79 | pickup_latitude,
80 | RateCodeID,
81 | store_and_fwd_flag,
82 | dropoff_longitude,
83 | dropoff_latitude,
84 | payment_type,
85 | fare_amount,
86 | extra,
87 | mta_tax,
88 | improvement_surcharge,
89 | tip_amount,
90 | tolls_amount,
91 | total_amount)
92 | VALUES
93 | (1,'2016-03-20 19:20:09', '2016-03-20 19:23:25', 1, 0.7 , -73.99173736572266, 40.7386589050293, 1, 'N', -73.99967956542969, 40.73412322998047, 1, 4.5, 0, 0.5, 0.3, 1, 0, 6.3),
94 | (1,'2016-03-20 19:20:10', '2016-03-20 19:25:52', 2, 1.4 , -73.99103546142578, 40.76046371459961, 1, 'N', -73.98226928710938, 40.77155303955078, 2, 5.5, 0, 0.5, 0.3, 0, 0, 6.3),
95 | (2,'2016-03-20 19:20:11', '2016-03-20 19:27:09', 1, 0.66, -73.98735046386719, 40.75653076171875, 1, 'N', -73.9967269897461, 40.76028060913086, 1, 32.5, 0, 0.5, 0.3, 5, 5.54, 43.84),
96 | (2,'2016-03-20 19:20:13', '2016-03-20 19:26:47', 1, 1.61, -73.96199035644531, 40.77945327758789, 1, 'N', -73.98197937011719, 40.77967071533203, 1, 4.5, 0, 0.5, 0.3, 2, 0, 7.3),
97 | (1,'2016-03-20 19:20:15', '2016-03-20 19:25:46', 1, 1.9 ,-74.00791931152344, 40.74011993408203, 1, 'N', -74.0147933959961, 40.715457916259766, 1, 15.5, 0, 0.5, 0.3, 2.5, 0, 18.8),
98 | (1,'2016-03-20 19:20:17', '2016-03-20 19:25:14', 1, 1.3 ,-73.95572662353516, 40.78500747680664, 1, 'N', -73.96957397460938, 40.79618453979492, 1, 4.5, 0, 0.5, 0.3, 1.05, 0, 6.35),
99 | (2,'2016-03-20 19:20:18', '2016-03-20 19:23:29', 1, 0.55, -73.99325561523438, 40.72775650024414, 1, 'N', -73.987060546875, 40.729312896728516, 1, 14.5, 0, 0.5, 0.3, 3.8, 0, 19.1),
100 | (2,'2016-03-20 19:20:20', '2016-03-20 19:43:50', 1, 4.17, -74.0016860961914, 40.73445129394531, 1, 'N', -73.96067810058594, 40.77259826660156, 2, 16.5, 0, 0.5, 0.3, 0, 0, 17.3),
101 | (1,'2016-03-20 19:20:23', '2016-03-20 19:23:25', 1, 0.6 ,-73.94763946533203, 40.77100372314453, 1, 'N', -73.95655059814453, 40.775543212890625, 1, 8.5, 0, 0.5, 0.3, 2.79, 0, 12.09),
102 | (1,'2016-03-20 19:20:25', '2016-03-20 19:31:51', 1, 2.9 ,-73.98570251464844, 40.74380111694336, 1, 'N', -73.96024322509766, 40.780582427978516, 2, 15.5, 0, 0.5, 0.3, 0, 0, 16.3)
103 | """.trimIndent()).toList().shouldHaveSize(1)
104 | }
105 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/TruncateTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import io.kotlintest.eventually
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.specs.FunSpec
6 | import io.lenses.jdbc4.data.MovieData
7 | import io.lenses.jdbc4.resultset.toList
8 | import java.time.Duration
9 |
10 | class TruncateTest : FunSpec(), MovieData {
11 | init {
12 |
13 | test("truncate table") {
14 | val conn = conn()
15 | val topic = populateMovies(conn, false)
16 |
17 | conn.createStatement().executeQuery("SELECT * FROM `$topic`").toList().shouldHaveSize(3)
18 | conn.createStatement().executeUpdate("TRUNCATE TABLE `$topic`")
19 | // use eventually because the topic has to be deleted and recreated which can take a few seconds
20 | eventually(Duration.ofSeconds(15)) {
21 | conn.createStatement().executeQuery("SELECT * FROM `$topic`").toList().shouldHaveSize(0)
22 | }
23 | }
24 | }
25 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/queries/ViewTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.queries
2 |
3 | import arrow.core.Try
4 | import io.kotlintest.matchers.collections.shouldHaveSize
5 | import io.kotlintest.shouldBe
6 | import io.kotlintest.shouldThrow
7 | import io.kotlintest.specs.FunSpec
8 | import io.lenses.jdbc4.LensesDriver
9 | import io.lenses.jdbc4.ProducerSetup
10 | import io.lenses.jdbc4.resultset.toList
11 | import java.sql.Connection
12 | import java.sql.SQLException
13 |
14 | class ViewTest : FunSpec(), ProducerSetup {
15 | init {
16 |
17 | LensesDriver()
18 | val conn = conn()
19 |
20 | test("VIEW test") {
21 | val topic = createTopic(conn)
22 | createTopicData(conn, topic)
23 |
24 | val view = "myview" + System.currentTimeMillis()
25 | Try { conn.createStatement().executeUpdate("DROP VIEW $view") }
26 | conn.createStatement().executeUpdate("CREATE VIEW $view AS SELECT tpep_pickup_datetime, VendorID FROM $topic limit 10")
27 | val rs = conn.createStatement().executeQuery("SELECT * FROM $view")
28 | rs.metaData.columnCount shouldBe 2
29 | List(2) { rs.metaData.getColumnLabel(it + 1) } shouldBe listOf("tpep_pickup_datetime", "VendorID")
30 |
31 | Try { conn.createStatement().executeUpdate("DROP VIEW $view") }
32 | shouldThrow<SQLException> {
33 | conn.createStatement().executeQuery("SELECT * FROM $view")
34 | }
35 | }
36 | }
37 |
38 | fun createTopic(conn: Connection): String {
39 | val topic = newTopicName()
40 | conn.createStatement().executeQuery("""
41 | create table $topic(
42 | _key string,
43 | VendorID int,
44 | tpep_pickup_datetime string,
45 | tpep_dropoff_datetime string,
46 | passenger_count int,
47 | trip_distance double,
48 | pickup_longitude double,
49 | pickup_latitude double,
50 | RateCodeID int,
51 | store_and_fwd_flag string,
52 | dropoff_longitude double,
53 | dropoff_latitude double,
54 | payment_type int,
55 | fare_amount double,
56 | extra double,
57 | mta_tax double,
58 | improvement_surcharge double,
59 | tip_amount double,
60 | tolls_amount double,
61 | total_amount double)
62 | format(string, avro)
63 | """.trimIndent()).toList().shouldHaveSize(1)
64 | return topic
65 | }
66 |
67 | private fun createTopicData(conn: Connection, topic: String) {
68 | conn.createStatement()
69 | .executeQuery("""
70 | insert into $topic(
71 | VendorID,
72 | tpep_pickup_datetime,
73 | tpep_dropoff_datetime,
74 | passenger_count,
75 | trip_distance,
76 | pickup_longitude,
77 | pickup_latitude,
78 | RateCodeID,
79 | store_and_fwd_flag,
80 | dropoff_longitude,
81 | dropoff_latitude,
82 | payment_type,
83 | fare_amount,
84 | extra,
85 | mta_tax,
86 | improvement_surcharge,
87 | tip_amount,
88 | tolls_amount,
89 | total_amount)
90 | VALUES
91 | (1,'2016-03-20 19:20:09', '2016-03-20 19:23:25', 1, 0.7 , -73.99173736572266, 40.7386589050293, 1, 'N', -73.99967956542969, 40.73412322998047, 1, 4.5, 0, 0.5, 0.3, 1, 0, 6.3),
92 | (1,'2016-03-20 19:20:10', '2016-03-20 19:25:52', 2, 1.4 , -73.99103546142578, 40.76046371459961, 1, 'N', -73.98226928710938, 40.77155303955078, 2, 5.5, 0, 0.5, 0.3, 0, 0, 6.3),
93 | (2,'2016-03-20 19:20:11', '2016-03-20 19:27:09', 1, 0.66, -73.98735046386719, 40.75653076171875, 1, 'N', -73.9967269897461, 40.76028060913086, 1, 32.5, 0, 0.5, 0.3, 5, 5.54, 43.84),
94 | (2,'2016-03-20 19:20:13', '2016-03-20 19:26:47', 1, 1.61, -73.96199035644531, 40.77945327758789, 1, 'N', -73.98197937011719, 40.77967071533203, 1, 4.5, 0, 0.5, 0.3, 2, 0, 7.3),
95 | (1,'2016-03-20 19:20:15', '2016-03-20 19:25:46', 1, 1.9 ,-74.00791931152344, 40.74011993408203, 1, 'N', -74.0147933959961, 40.715457916259766, 1, 15.5, 0, 0.5, 0.3, 2.5, 0, 18.8),
96 | (1,'2016-03-20 19:20:17', '2016-03-20 19:25:14', 1, 1.3 ,-73.95572662353516, 40.78500747680664, 1, 'N', -73.96957397460938, 40.79618453979492, 1, 4.5, 0, 0.5, 0.3, 1.05, 0, 6.35),
97 | (2,'2016-03-20 19:20:18', '2016-03-20 19:23:29', 1, 0.55, -73.99325561523438, 40.72775650024414, 1, 'N', -73.987060546875, 40.729312896728516, 1, 14.5, 0, 0.5, 0.3, 3.8, 0, 19.1),
98 | (2,'2016-03-20 19:20:20', '2016-03-20 19:43:50', 1, 4.17, -74.0016860961914, 40.73445129394531, 1, 'N', -73.96067810058594, 40.77259826660156, 2, 16.5, 0, 0.5, 0.3, 0, 0, 17.3),
99 | (1,'2016-03-20 19:20:23', '2016-03-20 19:23:25', 1, 0.6 ,-73.94763946533203, 40.77100372314453, 1, 'N', -73.95655059814453, 40.775543212890625, 1, 8.5, 0, 0.5, 0.3, 2.79, 0, 12.09),
100 | (1,'2016-03-20 19:20:25', '2016-03-20 19:31:51', 1, 2.9 ,-73.98570251464844, 40.74380111694336, 1, 'N', -73.96024322509766, 40.780582427978516, 2, 15.5, 0, 0.5, 0.3, 0, 0, 16.3)
101 | """.trimIndent()).toList().shouldHaveSize(1)
102 | }
103 | }
--------------------------------------------------------------------------------
/src/test/kotlin/io/lenses/jdbc4/resultset/ConvertingRowTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.kotlintest.shouldBe
4 | import io.kotlintest.specs.WordSpec
5 | import io.lenses.jdbc4.row.ListRow
6 | import java.math.BigDecimal
7 | import java.time.Instant
8 | import java.time.ZoneId
9 |
10 | class ConvertingRowTest : WordSpec() {
11 | init {
12 |
13 | "ConvertingRow" should {
14 | "convert char to String" {
15 | ListRow(listOf('a')).getString(1) shouldBe "a"
16 | }
17 | "convert number to String" {
18 | ListRow(listOf(1)).getString(1) shouldBe "1"
19 | ListRow(listOf(1L)).getString(1) shouldBe "1"
20 | ListRow(listOf(1.1)).getString(1) shouldBe "1.1"
21 | ListRow(listOf(1.1F)).getString(1) shouldBe "1.1"
22 | }
23 | "convert boolean to String" {
24 | ListRow(listOf(true)).getString(1) shouldBe "true"
25 | }
26 | "convert int to long" {
27 | ListRow(listOf(1)).getLong(1) shouldBe 1L
28 | }
29 | "convert String to long" {
30 | ListRow(listOf("1")).getLong(1) shouldBe 1L
31 | }
32 | "convert long to int" {
33 | ListRow(listOf(1L)).getInt(1) shouldBe 1
34 | }
35 | "convert String to int" {
36 | ListRow(listOf("1")).getInt(1) shouldBe 1
37 | }
38 | "convert String to boolean" {
39 | ListRow(listOf("true")).getBoolean(1) shouldBe true
40 | }
41 | "convert String to double" {
42 | ListRow(listOf("1.1")).getDouble(1) shouldBe 1.1
43 | }
44 | "convert String to float" {
45 | ListRow(listOf("1.1")).getFloat(1) shouldBe 1.1F
46 | }
47 | "convert Long to Date" {
48 | ListRow(listOf(123456789L)).getDate(1).time shouldBe Instant.ofEpochMilli(123456789L).atZone(ZoneId.of("Z")).toInstant().toEpochMilli()
49 | }
50 | "convert Int to Date" {
51 | ListRow(listOf(123)).getDate(1) shouldBe java.sql.Date.from(Instant.ofEpochMilli(123))
52 | }
53 | "convert Long to BigDecimal" {
54 | ListRow(listOf(123L)).getBigDecimal(1) shouldBe BigDecimal(123L)
55 | }
56 | "convert int to byte" {
57 | val b: Byte = 1
58 | ListRow(listOf("1")).getByte(1) shouldBe b
59 | }
60 | "convert String to Reader" {
61 | val reader = ListRow(listOf("abc")).charStream(1)
62 | reader.readText() shouldBe "abc"
63 | }
64 | }
65 | }
66 | }
--------------------------------------------------------------------------------
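ConvertingRowTest above pins down which type coercions a row supports; from a caller's perspective this means a column can be read through a different typed accessor than the one matching its storage type. A small sketch of that usage (column names are placeholders):

import java.sql.ResultSet

// Read columns through several typed accessors; the conversions exercised by
// ConvertingRowTest (string/number to long, double, etc.) apply here.
fun readTyped(rs: ResultSet) {
  while (rs.next()) {
    val asString = rs.getString("amount") // e.g. "1.1"
    val asDouble = rs.getDouble("amount") // numeric string converted to double
    val asLong = rs.getLong("count")      // int or numeric string widened to long
    println("$asString $asDouble $asLong")
  }
}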
/src/test/kotlin/io/lenses/jdbc4/resultset/ListResultSetTest.kt:
--------------------------------------------------------------------------------
1 | package io.lenses.jdbc4.resultset
2 |
3 | import io.kotlintest.shouldBe
4 | import io.kotlintest.shouldThrow
5 | import io.kotlintest.specs.WordSpec
6 | import io.lenses.jdbc4.row.RecordRow
7 | import org.apache.avro.SchemaBuilder
8 | import org.apache.avro.generic.GenericRecordBuilder
9 | import java.sql.ResultSet
10 | import java.sql.SQLFeatureNotSupportedException
11 |
12 | class ListResultSetTest : WordSpec() {
13 | init {
14 | "ListResultSet" should {
15 | "support findColumn as 1-indexed" {
16 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").optionalString("goo").endRecord()
17 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
18 | val rs = ListResultSet(null, schema, records)
19 | rs.next()
20 | rs.findColumn("foo") shouldBe 1
21 | rs.findColumn("goo") shouldBe 2
22 | }
23 | "use next to iterate records" {
24 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
25 | val records = listOf(
26 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
27 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build())
28 | )
29 | val rs = ListResultSet(null, schema, records)
30 | rs.next()
31 | rs.getString(1) shouldBe "woo"
32 | rs.next()
33 | rs.getString(1) shouldBe "boo"
34 | }
35 | "support by label lookup" {
36 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").optionalString("goo").endRecord()
37 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
38 | val rs = ListResultSet(null, schema, records)
39 | rs.next()
40 | rs.getString("foo") shouldBe "woo"
41 | rs.getString("goo") shouldBe null
42 | }
43 | "getString of null should return null" {
44 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
45 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", null).build()))
46 | val rs = ListResultSet(null, schema, records)
47 | rs.next()
48 | rs.getString("foo") shouldBe null
49 | }
50 | "support absolute positive position" {
51 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
52 | val records = listOf(
53 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
54 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build()),
55 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build())
56 | )
57 | val rs = ListResultSet(null, schema, records)
58 | rs.absolute(3)
59 | rs.getString(1) shouldBe "boo"
60 | rs.absolute(1)
61 | rs.getString(1) shouldBe "woo"
62 | }
63 | "support absolute negative position" {
64 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
65 | val records = listOf(
66 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
67 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build()),
68 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build())
69 | )
70 | val rs = ListResultSet(null, schema, records)
71 | // -1 is defined as the last result
72 | rs.absolute(-1)
73 | rs.getString(1) shouldBe "goo"
74 | rs.absolute(-3)
75 | rs.getString(1) shouldBe "woo"
76 | rs.absolute(-10)
77 | rs.isBeforeFirst shouldBe true
78 | }
79 | "support relative positioning" {
80 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
81 | val records = listOf(
82 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
83 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build()),
84 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build())
85 | )
86 | val rs = ListResultSet(null, schema, records)
87 | // adding 2 should move us onto the second result
88 | rs.relative(2)
89 | rs.getString(1) shouldBe "boo"
90 | rs.relative(1)
91 | rs.getString(1) shouldBe "goo"
92 | }
93 | "support first() and isFirst()" {
94 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
95 | val records = listOf(
96 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
97 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build()),
98 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build())
99 | )
100 | val rs = ListResultSet(null, schema, records)
101 | rs.isFirst shouldBe false
102 | rs.next()
103 | rs.isFirst shouldBe true
104 | rs.next()
105 | rs.isFirst shouldBe false
106 | rs.first()
107 | rs.isFirst shouldBe true
108 | }
109 | "support last() and isLast()" {
110 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
111 | val records = listOf(
112 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
113 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build()),
114 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build())
115 | )
116 | val rs = ListResultSet(null, schema, records)
117 | rs.isLast shouldBe false
118 | rs.last()
119 | rs.isLast shouldBe true
120 | rs.previous()
121 | rs.isLast shouldBe false
122 | }
123 | "support isAfterLast()" {
124 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
125 | val records = listOf(
126 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
127 | RecordRow(GenericRecordBuilder(schema).set("foo", "boo").build()),
128 | RecordRow(GenericRecordBuilder(schema).set("foo", "goo").build())
129 | )
130 | val rs = ListResultSet(null, schema, records)
131 | rs.isAfterLast shouldBe false
132 | rs.last()
133 | rs.isAfterLast shouldBe false
134 | rs.next()
135 | rs.isAfterLast shouldBe true
136 | }
137 | "support beforeFirst and isBeforeFirst()" {
138 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
139 | val records = listOf(
140 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
141 | RecordRow(GenericRecordBuilder(schema).set("foo", null).build())
142 | )
143 | val rs = ListResultSet(null, schema, records)
144 | // should start as before first
145 | rs.isBeforeFirst shouldBe true
146 | // move on past the marker
147 | rs.next()
148 | rs.isBeforeFirst shouldBe false
149 | // back to before first again
150 | rs.beforeFirst()
151 | rs.isBeforeFirst shouldBe true
152 | }
153 | "return READ ONLY for concurrency" {
154 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
155 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
156 | ListResultSet(null, schema, records).concurrency shouldBe ResultSet.CONCUR_READ_ONLY
157 | }
158 | "return true for isClosed" {
159 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
160 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
161 | ListResultSet(null, schema, records).isClosed shouldBe true
162 | }
163 | "return TYPE_SCROLL_INSENSITIVE for type" {
164 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
165 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
166 | ListResultSet(null, schema, records).type shouldBe ResultSet.TYPE_SCROLL_INSENSITIVE
167 | }
168 | "return -1 for fetch size" {
169 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
170 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
171 | ListResultSet(null, schema, records).fetchSize shouldBe -1
172 | }
173 | "track last value to support wasNull" {
174 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
175 | val records = listOf(
176 | RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()),
177 | RecordRow(GenericRecordBuilder(schema).set("foo", null).build())
178 | )
179 | val rs = ListResultSet(null, schema, records)
180 | rs.next()
181 | rs.getString("foo") shouldBe "woo"
182 | rs.wasNull() shouldBe false
183 |
184 | rs.next()
185 | rs.getString("foo") shouldBe null
186 | rs.wasNull() shouldBe true
187 | }
188 | "throw SQLFeatureNotSupportedException for deletion methods" {
189 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
190 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
191 | val rs = ListResultSet(null, schema, records)
192 |
193 | shouldThrow<SQLFeatureNotSupportedException> {
194 | rs.deleteRow()
195 | }
196 |
197 | shouldThrow<SQLFeatureNotSupportedException> {
198 | rs.rowDeleted()
199 | }
200 | }
201 | "throw SQLFeatureNotSupportedException for update methods" {
202 | val schema = SchemaBuilder.record("wibble").fields().optionalString("foo").endRecord()
203 | val records = listOf(RecordRow(GenericRecordBuilder(schema).set("foo", "woo").build()))
204 | val rs = ListResultSet(null, schema, records)
205 |
206 | shouldThrow<SQLFeatureNotSupportedException> {
207 | rs.rowUpdated()
208 | }
209 |
210 | shouldThrow<SQLFeatureNotSupportedException> {
211 | rs.cancelRowUpdates()
212 | }
213 |
214 | shouldThrow<SQLFeatureNotSupportedException> {
215 | rs.updateRow()
216 | }
217 | }
218 | }
219 | }
220 | }
--------------------------------------------------------------------------------
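ListResultSetTest above covers the scroll-insensitive cursor behaviour; client code sees it through the standard ResultSet navigation calls. A brief sketch (the column name is a placeholder):

import java.sql.ResultSet

// Navigate a scrollable result set as exercised by ListResultSetTest above.
fun walk(rs: ResultSet) {
  rs.last()            // jump to the final row
  println("last=${rs.getString("foo")}")

  rs.absolute(1)       // back to the first row (positions are 1-indexed)
  println("first=${rs.getString("foo")}")

  rs.beforeFirst()     // reset the cursor and iterate normally
  while (rs.next()) {
    val value = rs.getString("foo")
    if (rs.wasNull()) println("<null>") else println(value)
  }
}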
/src/test/resources/keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/lenses-jdbc/4ea41f66879c6e506b42432f6899cf6376f26957/src/test/resources/keystore.jks
--------------------------------------------------------------------------------
/src/test/resources/log4j2.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/src/test/resources/messages.json:
--------------------------------------------------------------------------------
1 | {
2 | "messages": [
3 | {
4 | "timestamp": 1515438609862,
5 | "partition": 0,
6 | "key": "5290441401157247",
7 | "offset": 7,
8 | "topic": "cc_payments",
9 | "value": "{\"id\":\"txn1515438609862\",\"time\":\"2018-01-08T19:10:09.862Z\",\"amount\":3593.700000000000000000,\"currency\":\"USD\",\"creditCardId\":\"5290441401157247\",\"merchantId\":15}"
10 | },
11 | {
12 | "timestamp": 1515438610464,
13 | "partition": 0,
14 | "key": "5390713494347532",
15 | "offset": 10,
16 | "topic": "cc_payments",
17 | "value": "{\"id\":\"txn1515438610463\",\"time\":\"2018-01-08T19:10:10.463Z\",\"amount\":2470.570000000000000000,\"currency\":\"USD\",\"creditCardId\":\"5390713494347532\",\"merchantId\":74}"
18 | },
19 | {
20 | "timestamp": 1515438610664,
21 | "partition": 0,
22 | "key": "5290441401157247",
23 | "offset": 11,
24 | "topic": "cc_payments",
25 | "value": "{\"id\":\"txn1515438610664\",\"time\":\"2018-01-08T19:10:10.664Z\",\"amount\":3676.860000000000000000,\"currency\":\"USD\",\"creditCardId\":\"5290441401157247\",\"merchantId\":34}"
26 | }
27 | ],
28 | "offsets": [
29 | {
30 | "partition": 0,
31 | "min": 0,
32 | "max": 48889
33 | }
34 | ]
35 | }
--------------------------------------------------------------------------------